/*
 * Copyright 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdlib.h>

#include <algorithm>

#include <binder/ProcessState.h>
#include <gtest/gtest.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/MediaCodecBuffer.h>
#include <media/hardware/VideoAPI.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

namespace android {

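// Shared fixture for the sanity tests below: owns the looper, the codec under
// test and the messages used for the configure-time format (cfg), the input
// format (ifmt) and the output format (ofmt). The destructor releases the
// codec and stops the looper.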
class MediaCodecSanityTest : public ::testing::Test {
public:
    MediaCodecSanityTest()
        : looper(new ALooper),
          cfg(new AMessage),
          ifmt(new AMessage),
          ofmt(new AMessage) {
        ProcessState::self()->startThreadPool();
        looper->start();
    }

    ~MediaCodecSanityTest() {
        if (codec != nullptr) {
            codec->release();
        }
        looper->stop();
    }

    sp<ALooper> looper;
    sp<MediaCodec> codec;
    sp<AMessage> cfg;
    sp<AMessage> ifmt;
    sp<AMessage> ofmt;
};

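// 1 MiB: the allocation expected for a linear (non-graphic) input buffer; used
// below when the codec does not report a max-input-size of its own.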
const static size_t kLinearBufferSize = 1048576;

// data for a codec input frame
struct FrameData {
    const uint8_t *data;
    size_t size;
    template<size_t N>
    constexpr FrameData(const uint8_t(&data_)[N]) : data(data_), size(N) { }
};

// one yellow frame of 240x180 (albeit 4:4:4)
const uint8_t avcStream_A1[] = { // IDR frame
    0x00, 0x00, 0x00, 0x01, 0x67, 0x64, 0x00, 0x0d, 0xac, 0xd9, 0x41, 0x41, 0xfa, 0x10, 0x00, 0x00,
    0x03, 0x00, 0x10, 0x00, 0x00, 0x03, 0x03, 0x20, 0xf1, 0x42, 0x99, 0x60,

    0x00, 0x00, 0x00, 0x01, 0x68, 0xeb, 0xe3, 0xcb, 0x22, 0xc0,

    0x00, 0x00, 0x01, 0x65, 0x88, 0x84, 0x00, 0x2b, 0xff, 0xfe, 0xd8, 0xe7, 0xf3, 0x2c, 0xa5, 0x60,
    0xca, 0xbb, 0xf1, 0x5c, 0x44, 0x7c, 0x9a, 0xa5, 0xc3, 0xab, 0x2f, 0x77, 0x0a, 0x94, 0x0d, 0x19,
    0x43, 0x3b, 0x4f, 0x25, 0xea, 0x66, 0x00, 0x01, 0x24, 0xcd, 0x35, 0x5f, 0xc2, 0x34, 0x89, 0xd1,
    0xa5, 0x60, 0x09, 0x98, 0x00, 0x01, 0x1b, 0x0e, 0xcb, 0x0d, 0x04, 0x86, 0x94, 0xe2, 0x32, 0x3c,
    0xdd, 0x0f,
};

FrameData avcStream_A[] __unused = { avcStream_A1 };

// AVC stream of 2 yellow frames (240x180)
const uint8_t avcStream_B1[] = { // IDR frame
    0x00, 0x00, 0x00, 0x01, 0x67, 0x64, 0x00, 0x0c, 0xac, 0xd9, 0x41, 0x41, 0xfa, 0x10, 0x00, 0x00,
    0x03, 0x00, 0x10, 0x00, 0x00, 0x03, 0x02, 0x80, 0xf1, 0x42, 0x99, 0x60,

    0x00, 0x00, 0x00, 0x01, 0x68, 0xeb, 0xe3, 0xcb, 0x22, 0xc0,

    0x00, 0x00, 0x01, 0x65, 0x88, 0x84, 0x00, 0x33, 0xff, 0xfe, 0xdf, 0x32, 0xf8, 0x14, 0xd6, 0x25,
    0xd0, 0x74, 0x42, 0x50, 0x84, 0x6f, 0xf4, 0xc2, 0x5c, 0x76, 0x37, 0x17, 0x72, 0xac, 0x52, 0xfc,
    0xd6, 0x1f, 0xd2, 0xd0, 0x60, 0xb2, 0x20, 0x00, 0x10, 0x3d, 0x2a, 0xc0, 0xe4, 0x27, 0xcb, 0xce,
    0xea, 0x25, 0x00, 0x81, 0x00, 0x00, 0x0f, 0x40, 0xbc, 0x81, 0x15, 0xc1, 0x65, 0x20, 0x80, 0x81,
    0x7a, 0x57, 0x51,
};

const uint8_t avcStream_B2[] = { // P frame
    0x00, 0x00, 0x00, 0x01, 0x41, 0x9a, 0x21, 0x6c, 0x42, 0xbf, 0xfe, 0x38, 0x40, 0x00, 0x0d, 0x48,
};

FrameData avcStream_B[] = { avcStream_B1, avcStream_B2 };

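// Parameterized on the requested max-input-size; a negative parameter means
// the key is left unset so the codec's default is exercised.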
class MediaCodecInputBufferSizeTest : public MediaCodecSanityTest,
        public ::testing::WithParamInterface<int32_t> {
};

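// Verifies that the AVC software decoder reports the configured max-input-size
// back in its input format (or at least 1 MiB by default) and hands out input
// buffers of matching size (within a 4 KiB allowance).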
TEST_P(MediaCodecInputBufferSizeTest, TestAvcDecoder) {
    codec = MediaCodec::CreateByComponentName(looper, "c2.android.avc.decoder");
    cfg->setInt32("width", 320);
    cfg->setInt32("height", 240);
    cfg->setString("mime", MIMETYPE_VIDEO_AVC);

    const int32_t InputSize = GetParam();
    if (InputSize >= 0) {
        cfg->setInt32("max-input-size", InputSize);
    }

    EXPECT_EQ(codec->configure(cfg, nullptr, nullptr, 0), OK);
    EXPECT_EQ(codec->getInputFormat(&ifmt), OK);
    int32_t maxInputSize;
    ASSERT_TRUE(ifmt->findInt32("max-input-size", &maxInputSize));
    if (InputSize > 0) {
        EXPECT_EQ(maxInputSize, InputSize);
    } else {
        EXPECT_GE(maxInputSize, 1 << 20); // 1 MiB
    }
    EXPECT_EQ(codec->start(), OK);
    size_t ix;
    EXPECT_EQ(codec->dequeueInputBuffer(&ix, 1000000), OK);
    sp<MediaCodecBuffer> buf;
    EXPECT_EQ(codec->getInputBuffer(ix, &buf), OK);
    EXPECT_GE(buf->size(), (size_t)maxInputSize);
    EXPECT_LE(buf->size(), (size_t)maxInputSize + 4096u);
}

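// Same input-size check for the VP8 software decoder.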
TEST_P(MediaCodecInputBufferSizeTest, TestVideoDecoder) {
    codec = MediaCodec::CreateByComponentName(looper, "c2.android.vp8.decoder");
    cfg->setInt32("width", 320);
    cfg->setInt32("height", 240);
    cfg->setString("mime", MIMETYPE_VIDEO_VP8);

    const int32_t InputSize = GetParam();
    if (InputSize >= 0) {
        cfg->setInt32("max-input-size", InputSize);
    }

    EXPECT_EQ(codec->configure(cfg, nullptr, nullptr, 0), OK);
    EXPECT_EQ(codec->getInputFormat(&ifmt), OK);
    int32_t maxInputSize;
    ASSERT_TRUE(ifmt->findInt32("max-input-size", &maxInputSize));
    if (InputSize > 0) {
        EXPECT_EQ(maxInputSize, InputSize);
    } else {
        EXPECT_GE(maxInputSize, 1 << 20); // 1 MiB
    }
    EXPECT_EQ(codec->start(), OK);
    size_t ix;
    EXPECT_EQ(codec->dequeueInputBuffer(&ix, 1000000), OK);
    sp<MediaCodecBuffer> buf;
    EXPECT_EQ(codec->getInputBuffer(ix, &buf), OK);
    EXPECT_GE(buf->size(), (size_t)maxInputSize);
    EXPECT_LE(buf->size(), (size_t)maxInputSize + 4096u);
}

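// Same input-size check for the AAC software decoder. Without an explicit
// max-input-size the codec may report a smaller default (512 KiB here), but the
// allocated input buffers are still expected to match the linear buffer size.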
TEST_P(MediaCodecInputBufferSizeTest, TestAudioDecoder) {
    codec = MediaCodec::CreateByComponentName(looper, "c2.android.aac.decoder");
    cfg->setInt32("sample-rate", 44100);
    cfg->setInt32("channel-count", 2);
    cfg->setString("mime", MIMETYPE_AUDIO_AAC);

    const int32_t InputSize = GetParam();
    if (InputSize >= 0) {
        cfg->setInt32("max-input-size", InputSize);
    }

    EXPECT_EQ(codec->configure(cfg, nullptr, nullptr, 0), OK);
    EXPECT_EQ(codec->getInputFormat(&ifmt), OK);
    int32_t maxInputSize;
    if (InputSize > 0) {
        ASSERT_TRUE(ifmt->findInt32("max-input-size", &maxInputSize));
        EXPECT_EQ(maxInputSize, InputSize);
    } else {
        if (ifmt->findInt32("max-input-size", &maxInputSize)) {
            EXPECT_EQ(maxInputSize, 1 << 19); // 512 KiB
        }
        maxInputSize = kLinearBufferSize; // actual allocation is set by the buffer channel
    }

    EXPECT_EQ(codec->start(), OK);
    size_t ix;
    EXPECT_EQ(codec->dequeueInputBuffer(&ix, 1000000), OK);
    sp<MediaCodecBuffer> buf;
    EXPECT_EQ(codec->getInputBuffer(ix, &buf), OK);
    EXPECT_GE(buf->size(), (size_t)maxInputSize);
    EXPECT_LE(buf->size(), (size_t)maxInputSize + 4096u);
}

INSTANTIATE_TEST_CASE_P(InputSizes, MediaCodecInputBufferSizeTest, ::testing::Values(-1, 1234, 12345678));

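// Verifies that hdr-static-info supplied at configure time is echoed in the
// output format, both right after configure() and on the first decoded frame.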
TEST_F(MediaCodecSanityTest, TestAvcDecoderHdrStaticInfo) {
    codec = MediaCodec::CreateByComponentName(looper, "c2.android.avc.decoder");
    cfg->setInt32("width", 320);
    cfg->setInt32("height", 240);
    cfg->setString("mime", MIMETYPE_VIDEO_AVC);
    HDRStaticInfo info = { .mID = HDRStaticInfo::kType1, .sType1 = {
        .mR = { .x = 35400, .y = 14600 }, .mG = { .x = 8500,  .y = 39850 },
        .mB = { .x = 6550,  .y =  2300 }, .mW = { .x = 15635, .y = 16450 },
        .mMaxDisplayLuminance = 1000, .mMinDisplayLuminance = 1000,
        .mMaxContentLightLevel = 1000, .mMaxFrameAverageLightLevel = 120 }
    };
    cfg->setBuffer("hdr-static-info", ABuffer::CreateAsCopy(&info, sizeof(info)));

    EXPECT_EQ(codec->configure(cfg, nullptr, nullptr, 0), OK);
    EXPECT_EQ(codec->getOutputFormat(&ofmt), OK);
    sp<ABuffer> oinfo;
    ASSERT_TRUE(ofmt->findBuffer("hdr-static-info", &oinfo));
    ASSERT_EQ(oinfo->size(), sizeof(info));
    EXPECT_EQ(memcmp(oinfo->data(), &info, sizeof(info)), 0);

    EXPECT_EQ(codec->start(), OK);
    // assume we can submit all input before dequeuing output
    size_t frameIx = 0;
    size_t ix;
    sp<MediaCodecBuffer> buf;
    for (const FrameData &frame : avcStream_B) {
        EXPECT_EQ(codec->dequeueInputBuffer(&ix, 1000000), OK);
        EXPECT_EQ(codec->getInputBuffer(ix, &buf), OK);
        ASSERT_GE(buf->capacity(), frame.size);
        memcpy(buf->base(), frame.data, frame.size);
        EXPECT_EQ(buf->setRange(0, frame.size), OK);
        bool eos = ++frameIx == NELEM(avcStream_B);
        EXPECT_EQ(codec->queueInputBuffer(ix, 0, frame.size, frameIx * 33333,
                                          eos ? BUFFER_FLAG_END_OF_STREAM : 0), OK);
    }

    size_t offset, size;
    int64_t ts;
    uint32_t flags;
    bool mInfoFormatChangedOk = true;
    bool mInfoBuffersChangedOk = true;
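    // Drain until the first real output buffer, tolerating at most one
    // INFO_FORMAT_CHANGED and one INFO_OUTPUT_BUFFERS_CHANGED along the way.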
    while (true) {
        status_t err = codec->dequeueOutputBuffer(&ix, &offset, &size, &ts, &flags, 1000000);
        if (err == INFO_FORMAT_CHANGED && mInfoFormatChangedOk) {
            mInfoFormatChangedOk = false;
        } else if (err == INFO_OUTPUT_BUFFERS_CHANGED && mInfoBuffersChangedOk) {
            mInfoBuffersChangedOk = false;
        } else {
            ASSERT_EQ(err, OK);
            break;
        }
    }
    EXPECT_EQ(codec->getOutputBuffer(ix, &buf), OK);
    EXPECT_EQ(codec->getOutputFormat(ix, &ofmt), OK);
    ASSERT_TRUE(ofmt->findBuffer("hdr-static-info", &oinfo));
    ASSERT_EQ(oinfo->size(), sizeof(info));
    EXPECT_EQ(memcmp(oinfo->data(), &info, sizeof(info)), 0);
}

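// Same configure-time hdr-static-info check for the MPEG-4 software decoder;
// no frames are decoded here.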
TEST_F(MediaCodecSanityTest, TestVideoDecoderHdrStaticInfo) {
    codec = MediaCodec::CreateByComponentName(looper, "c2.android.mpeg4.decoder");
    cfg->setInt32("width", 320);
    cfg->setInt32("height", 240);
    cfg->setString("mime", MIMETYPE_VIDEO_MPEG4);
    HDRStaticInfo info = { .mID = HDRStaticInfo::kType1, .sType1 = {
        .mR = { .x = 35400, .y = 14600 }, .mG = { .x = 8500,  .y = 39850 },
        .mB = { .x = 6550,  .y =  2300 }, .mW = { .x = 15635, .y = 16450 },
        .mMaxDisplayLuminance = 1000, .mMinDisplayLuminance = 1000,
        .mMaxContentLightLevel = 1000, .mMaxFrameAverageLightLevel = 120 }
    };
    cfg->setBuffer("hdr-static-info", ABuffer::CreateAsCopy(&info, sizeof(info)));

    EXPECT_EQ(codec->configure(cfg, nullptr, nullptr, 0), OK);
    EXPECT_EQ(codec->getOutputFormat(&ofmt), OK);
    sp<ABuffer> oinfo;
    ASSERT_TRUE(ofmt->findBuffer("hdr-static-info", &oinfo));
    ASSERT_EQ(oinfo->size(), sizeof(info));
    EXPECT_EQ(memcmp(oinfo->data(), &info, sizeof(info)), 0);
}

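// Parameterized on the requested color-format; -1 means the key is left unset
// and the default (COLOR_FormatYUV420Planar) is expected.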
class MediaCodecByteBufferTest : public MediaCodecSanityTest,
        public ::testing::WithParamInterface<int32_t> {
};

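// Decodes one yellow AVC frame into a byte buffer and validates the MediaImage2
// layout reported in both the output format and the buffer meta against the
// negotiated color format, then checks every pixel of every plane.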
TEST_P(MediaCodecByteBufferTest, TestVideoDecoder420Planar) {
    codec = MediaCodec::CreateByComponentName(looper, "c2.android.avc.decoder");
//    codec = MediaCodec::CreateByComponentName(looper, "OMX.google.h264.decoder");
    cfg->setInt32("width", 320);
    cfg->setInt32("height", 240);
    cfg->setString("mime", MIMETYPE_VIDEO_AVC);
    const int32_t Color = GetParam();
    if (Color >= 0) {
        cfg->setInt32("color-format", Color);
    }
    int32_t xcolor = Color == -1 ? COLOR_FormatYUV420Planar : Color;

    EXPECT_EQ(codec->configure(cfg, nullptr, nullptr, 0), OK);
    EXPECT_EQ(codec->getOutputFormat(&ofmt), OK);
    int32_t ocolor = -1;
    EXPECT_TRUE(ofmt->findInt32("color-format", &ocolor));
    EXPECT_EQ(ocolor, xcolor);

    EXPECT_EQ(codec->start(), OK);
    // assume we can submit all input before dequeuing output
    size_t frameIx = 0;
    size_t ix;
    sp<MediaCodecBuffer> buf;
    for (const FrameData &frame : avcStream_A) {
        EXPECT_EQ(codec->dequeueInputBuffer(&ix, 1000000), OK);
        EXPECT_EQ(codec->getInputBuffer(ix, &buf), OK);
        ASSERT_GE(buf->capacity(), frame.size);
        memcpy(buf->base(), frame.data, frame.size);
        EXPECT_EQ(buf->setRange(0, frame.size), OK);
        bool eos = ++frameIx == NELEM(avcStream_A);
        EXPECT_EQ(codec->queueInputBuffer(ix, 0, frame.size, frameIx * 33333,
                                          eos ? BUFFER_FLAG_END_OF_STREAM : 0), OK);
    }

    size_t offset, size;
    int64_t ts;
    uint32_t flags;
    bool mInfoFormatChangedOk = true;
    bool mInfoBuffersChangedOk = true;
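    // As above: tolerate at most one INFO_FORMAT_CHANGED and one
    // INFO_OUTPUT_BUFFERS_CHANGED before the first real output buffer.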
    while (true) {
        status_t err = codec->dequeueOutputBuffer(&ix, &offset, &size, &ts, &flags, 1000000);
        if (err == INFO_FORMAT_CHANGED && mInfoFormatChangedOk) {
            mInfoFormatChangedOk = false;
        } else if (err == INFO_OUTPUT_BUFFERS_CHANGED && mInfoBuffersChangedOk) {
            mInfoBuffersChangedOk = false;
        } else {
            ASSERT_EQ(err, OK);
            break;
        }
    }
    EXPECT_EQ(codec->getOutputBuffer(ix, &buf), OK);
    EXPECT_EQ(codec->getOutputFormat(ix, &ofmt), OK);
    ASSERT_TRUE(ofmt->findInt32("color-format", &ocolor));
    EXPECT_EQ(ocolor, xcolor) << ofmt->debugString(8).c_str() << buf->meta()->debugString(8).c_str();
    // expect an image-data in both format and meta
    sp<ABuffer> imgBuf, imgBuf2;
    ASSERT_TRUE(ofmt->findBuffer("image-data", &imgBuf));
    ASSERT_TRUE(buf->meta()->findBuffer("image-data", &imgBuf2));
    EXPECT_EQ(imgBuf->size(), sizeof(MediaImage2));
    ASSERT_EQ(imgBuf->size(), imgBuf2->size());
    EXPECT_EQ(0, memcmp(imgBuf->data(), imgBuf2->data(), imgBuf->size()));
    MediaImage2 *img = (MediaImage2*)imgBuf->data();
    EXPECT_EQ(img->mType, img->MEDIA_IMAGE_TYPE_YUV);
    EXPECT_EQ(img->mNumPlanes, 3u);
    EXPECT_EQ(img->mWidth, 320u);
    EXPECT_EQ(img->mHeight, 240u);
    EXPECT_EQ(img->mBitDepth, 8u);
    EXPECT_EQ(img->mBitDepthAllocated, 8u);

    // read strides from format
    int32_t stride, vstride;
    ofmt->findInt32("stride", &stride)          || ofmt->findInt32("width", &stride);
    ofmt->findInt32("slice-height", &vstride)   || ofmt->findInt32("height", &vstride);

    EXPECT_EQ(img->mPlane[img->Y].mHorizSubsampling, 1u);
    EXPECT_EQ(img->mPlane[img->Y].mVertSubsampling, 1u);
    EXPECT_EQ(img->mPlane[img->U].mHorizSubsampling, 2u);
    EXPECT_EQ(img->mPlane[img->U].mVertSubsampling, 2u);
    EXPECT_EQ(img->mPlane[img->V].mHorizSubsampling, 2u);
    EXPECT_EQ(img->mPlane[img->V].mVertSubsampling, 2u);

    switch (xcolor) {
        // defined formats
        case COLOR_FormatYUV420Planar:
        case COLOR_FormatYUV420PackedPlanar:
            EXPECT_EQ(img->mPlane[img->Y].mOffset, 0u);
            EXPECT_EQ(img->mPlane[img->Y].mColInc, 1);
            EXPECT_EQ(img->mPlane[img->Y].mRowInc, stride);

            EXPECT_EQ(img->mPlane[img->U].mOffset, (uint32_t)(stride * vstride));
            EXPECT_EQ(img->mPlane[img->U].mColInc, 1);
            EXPECT_EQ(img->mPlane[img->U].mRowInc, stride / 2);

            EXPECT_EQ(img->mPlane[img->V].mOffset, (uint32_t)(stride * vstride * 5 / 4));
            EXPECT_EQ(img->mPlane[img->V].mColInc, 1);
            EXPECT_EQ(img->mPlane[img->V].mRowInc, stride / 2);

            EXPECT_GE(size, (size_t)(stride * vstride * 5 / 4 + stride / 2 * 119 + 160));
            EXPECT_LE(size, (size_t)(stride * vstride * 3 / 2));
            break;

        case COLOR_FormatYUV420SemiPlanar:
        case COLOR_FormatYUV420PackedSemiPlanar:
            EXPECT_EQ(img->mPlane[img->Y].mOffset, 0u);
            EXPECT_EQ(img->mPlane[img->Y].mColInc, 1);
            EXPECT_EQ(img->mPlane[img->Y].mRowInc, stride);

            EXPECT_EQ(img->mPlane[img->U].mOffset, (uint32_t)(stride * vstride));
            EXPECT_EQ(img->mPlane[img->U].mColInc, 2);
            EXPECT_EQ(img->mPlane[img->U].mRowInc, stride);

            EXPECT_EQ(img->mPlane[img->V].mOffset, (uint32_t)(stride * vstride + 1));
            EXPECT_EQ(img->mPlane[img->V].mColInc, 2);
            EXPECT_EQ(img->mPlane[img->V].mRowInc, stride);

            EXPECT_GE(size, (size_t)(stride * vstride + stride * 119 + 320));
            EXPECT_LE(size, (size_t)(stride * vstride * 3 / 2));
            break;

        case COLOR_FormatYUV420Flexible:
            // anything goes, but stride should match Y plane
            EXPECT_EQ(img->mPlane[img->Y].mRowInc, stride);

            EXPECT_GE(size,
                      std::max({
                            img->mPlane[img->Y].mOffset + 239 * img->mPlane[img->Y].mRowInc
                                    + 319 * img->mPlane[img->Y].mColInc + 1,
                            img->mPlane[img->U].mOffset + 119 * img->mPlane[img->U].mRowInc
                                    + 159 * img->mPlane[img->U].mColInc + 1,
                            img->mPlane[img->V].mOffset + 119 * img->mPlane[img->V].mRowInc
                                    + 159 * img->mPlane[img->V].mColInc + 1 }));
            break;

        default:
            break;
    }

    // validate all pixels
#if 0
    fprintf(stderr, "MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }\n",
            img->mWidth, img->mHeight,
            img->mPlane[0].mOffset, img->mPlane[0].mColInc, img->mPlane[0].mRowInc,
            img->mPlane[1].mOffset, img->mPlane[1].mColInc, img->mPlane[1].mRowInc,
            img->mPlane[2].mOffset, img->mPlane[2].mColInc, img->mPlane[2].mRowInc);
#endif
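    // The stream is a solid yellow frame: Y=210, U=16, V=146 (limited-range BT.601 yellow).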
    for (ix = 0; ix < 3; ++ix) {
        const static uint8_t expected[] = { 210, 16, 146 };
        for (uint32_t y = 0; y < img->mHeight / img->mPlane[ix].mVertSubsampling; ++y) {
            for (uint32_t x = 0; x < img->mWidth / img->mPlane[ix].mHorizSubsampling; ++x) {
                uint8_t val = buf->data()[img->mPlane[ix].mOffset + img->mPlane[ix].mColInc * x
                        + img->mPlane[ix].mRowInc * y];
                ASSERT_EQ(val, expected[ix]) << "incorrect value for plane "
                        << ix << " at x=" << x << ", y=" << y;
            }
        }
    }
}

INSTANTIATE_TEST_CASE_P(ColorFormats, MediaCodecByteBufferTest, ::testing::Values(
        -1,
        COLOR_FormatYUV420Planar,
        COLOR_FormatYUV420SemiPlanar,
        COLOR_FormatYUV420PackedPlanar,
        COLOR_FormatYUV420PackedSemiPlanar,
        COLOR_FormatYUV420Flexible));

} // namespace android