/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

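// mediafilterTest: decodes the first video track of an input file with a
// MediaCodec decoder, converts each decoded frame to ARGB, pushes it through
// a filter codec component (android.filter.*, optionally backed by a
// RenderScript kernel), and can render the filtered output to a surface.
// When kTestFlush is enabled, flush() error paths are exercised as well.
//
// Usage sketch (binary name is build-dependent, shown here as a placeholder):
//   <mediafilter-binary> -b -R /path/to/clip.mp4
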
//#define LOG_NDEBUG 0
#define LOG_TAG "mediafilterTest"

#include <inttypes.h>

#include <binder/ProcessState.h>
#include <filters/ColorConvert.h>
#include <gui/ISurfaceComposer.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/NuMediaExtractor.h>
#include <media/stagefright/RenderScriptWrapper.h>
#include <OMX_IVCommon.h>
#include <ui/DisplayInfo.h>

#include "RenderScript.h"
#include "ScriptC_argbtorgba.h"
#include "ScriptC_nightvision.h"
#include "ScriptC_saturation.h"

// test parameters
static const bool kTestFlush = true;        // Note: true will drop 1 out of
static const int kFlushAfterFrames = 25;    // kFlushAfterFrames output frames
static const int64_t kTimeout = 500ll;

// built-in filter parameters
static const int32_t kInvert = false;   // ZeroFilter param
static const float kBlurRadius = 15.0f; // IntrinsicBlurFilter param
static const float kSaturation = 0.0f;  // SaturationFilter param

static void usage(const char *me) {
    fprintf(stderr, "usage: %s [flags]\n"
                    "\t[-b] use IntrinsicBlurFilter\n"
                    "\t[-c] use argb to rgba conversion RSFilter\n"
                    "\t[-n] use night vision RSFilter\n"
                    "\t[-r] use saturation RSFilter\n"
                    "\t[-s] use SaturationFilter\n"
                    "\t[-z] use ZeroFilter (copy filter)\n"
                    "\t[-R] render output to surface (enables -S)\n"
                    "\t[-S] allocate buffers from a surface\n"
                    "\t[-T] use render timestamps (enables -R)\n",
                    me);
    exit(1);
}

namespace android {

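// RSFilterCallback implementations used by the RenderScript-backed filter
// component ("android.filter.RenderScript"). Each one wraps a ScriptC kernel
// and runs it over the input allocation to fill the output allocation.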
struct SaturationRSFilter : RenderScriptWrapper::RSFilterCallback {
    void init(RSC::sp<RSC::RS> context) {
        mScript = new ScriptC_saturation(context);
        mScript->set_gSaturation(3.f);
    }

    virtual status_t processBuffers(
            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
        mScript->forEach_root(inBuffer, outBuffer);

        return OK;
    }

    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
        return OK;
    }

private:
    RSC::sp<ScriptC_saturation> mScript;
};

struct NightVisionRSFilter : RenderScriptWrapper::RSFilterCallback {
    void init(RSC::sp<RSC::RS> context) {
        mScript = new ScriptC_nightvision(context);
    }

    virtual status_t processBuffers(
            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
        mScript->forEach_root(inBuffer, outBuffer);

        return OK;
    }

    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
        return OK;
    }

private:
    RSC::sp<ScriptC_nightvision> mScript;
};

struct ARGBToRGBARSFilter : RenderScriptWrapper::RSFilterCallback {
    void init(RSC::sp<RSC::RS> context) {
        mScript = new ScriptC_argbtorgba(context);
    }

    virtual status_t processBuffers(
            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
        mScript->forEach_root(inBuffer, outBuffer);

        return OK;
    }

    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
        return OK;
    }

private:
    RSC::sp<ScriptC_argbtorgba> mScript;
};

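// Per-codec bookkeeping: the codec, its current input/output buffer arrays,
// EOS state on both ends, and a decoded-buffer count for the final stats.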
struct CodecState {
    sp<MediaCodec> mCodec;
    Vector<sp<ABuffer> > mInBuffers;
    Vector<sp<ABuffer> > mOutBuffers;
    bool mSignalledInputEOS;
    bool mSawOutputEOS;
    int64_t mNumBuffersDecoded;
};

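// A decoder output buffer that has been dequeued but not yet handed to the
// filter; the fields mirror the out-parameters of
// MediaCodec::dequeueOutputBuffer().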
struct DecodedFrame {
    size_t index;
    size_t offset;
    size_t size;
    int64_t presentationTimeUs;
    uint32_t flags;
};

enum FilterType {
    FILTERTYPE_ZERO,
    FILTERTYPE_INTRINSIC_BLUR,
    FILTERTYPE_SATURATION,
    FILTERTYPE_RS_SATURATION,
    FILTERTYPE_RS_NIGHT_VISION,
    FILTERTYPE_RS_ARGB_TO_RGBA,
};

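// Moves one decoded frame from the decoder's output side to the filter's
// input side: converts the YUV420sp frame to ARGB into a dequeued filter
// input buffer, copies the timestamp, then queues it. When kTestFlush is
// enabled, every kFlushAfterFrames-th frame instead verifies that queueing
// a buffer dequeued before flush() fails with -EACCES.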
size_t inputFramesSinceFlush = 0;
void tryCopyDecodedBuffer(
        List<DecodedFrame> *decodedFrameIndices,
        CodecState *filterState,
        CodecState *vidState) {
    if (decodedFrameIndices->empty()) {
        return;
    }

    size_t filterIndex;
    status_t err = filterState->mCodec->dequeueInputBuffer(
            &filterIndex, kTimeout);
    if (err != OK) {
        return;
    }

    ++inputFramesSinceFlush;

    DecodedFrame frame = *decodedFrameIndices->begin();

    // only consume a buffer if we are not going to flush, since we expect
    // the dequeue -> flush -> queue operation to cause an error and
    // not produce an output frame
    if (!kTestFlush || inputFramesSinceFlush < kFlushAfterFrames) {
        decodedFrameIndices->erase(decodedFrameIndices->begin());
    }
    size_t outIndex = frame.index;

    const sp<ABuffer> &srcBuffer =
        vidState->mOutBuffers.itemAt(outIndex);
    const sp<ABuffer> &destBuffer =
        filterState->mInBuffers.itemAt(filterIndex);

    sp<AMessage> srcFormat, destFormat;
    vidState->mCodec->getOutputFormat(&srcFormat);
    filterState->mCodec->getInputFormat(&destFormat);

    int32_t srcWidth, srcHeight, srcStride, srcSliceHeight;
    int32_t srcColorFormat, destColorFormat;
    int32_t destWidth, destHeight, destStride, destSliceHeight;
    CHECK(srcFormat->findInt32("stride", &srcStride)
            && srcFormat->findInt32("slice-height", &srcSliceHeight)
            && srcFormat->findInt32("width", &srcWidth)
            && srcFormat->findInt32("height", &srcHeight)
            && srcFormat->findInt32("color-format", &srcColorFormat));
    CHECK(destFormat->findInt32("stride", &destStride)
            && destFormat->findInt32("slice-height", &destSliceHeight)
            && destFormat->findInt32("width", &destWidth)
            && destFormat->findInt32("height", &destHeight)
            && destFormat->findInt32("color-format", &destColorFormat));

    CHECK(srcWidth <= destStride && srcHeight <= destSliceHeight);

    convertYUV420spToARGB(
            srcBuffer->data(),
            srcBuffer->data() + srcStride * srcSliceHeight,
            srcWidth,
            srcHeight,
            destBuffer->data());

    // copy timestamp
    int64_t timeUs;
    CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));
    destBuffer->meta()->setInt64("timeUs", timeUs);

    if (kTestFlush && inputFramesSinceFlush >= kFlushAfterFrames) {
        inputFramesSinceFlush = 0;

        // check that queueing a buffer that was dequeued before flush
        // fails with expected error EACCES
        filterState->mCodec->flush();

        err = filterState->mCodec->queueInputBuffer(
                filterIndex, 0 /* offset */, destBuffer->size(),
                timeUs, frame.flags);

        if (err == OK) {
            ALOGE("FAIL: queue after flush returned OK");
        } else if (err != -EACCES) {
            ALOGE("queueInputBuffer after flush returned %d, "
                    "expected -EACCES (-13)", err);
        }
    } else {
        err = filterState->mCodec->queueInputBuffer(
                filterIndex, 0 /* offset */, destBuffer->size(),
                timeUs, frame.flags);
        CHECK(err == OK);

        err = vidState->mCodec->releaseOutputBuffer(outIndex);
        CHECK(err == OK);
    }
}

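// Dequeues one output buffer from the filter and either releases it or
// renders it to the surface (optionally with an explicit render timestamp).
// When kTestFlush is enabled, every kFlushAfterFrames-th output frame
// verifies that releasing a buffer dequeued before flush() fails with
// -EACCES.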
size_t outputFramesSinceFlush = 0;
void tryDrainOutputBuffer(
        CodecState *filterState,
        const sp<Surface> &surface, bool renderSurface,
        bool useTimestamp, int64_t *startTimeRender) {
    size_t index;
    size_t offset;
    size_t size;
    int64_t presentationTimeUs;
    uint32_t flags;
    status_t err = filterState->mCodec->dequeueOutputBuffer(
            &index, &offset, &size, &presentationTimeUs, &flags,
            kTimeout);

    if (err != OK) {
        return;
    }

    ++outputFramesSinceFlush;

    if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
        filterState->mCodec->flush();
    }

    if (surface == NULL || !renderSurface) {
        err = filterState->mCodec->releaseOutputBuffer(index);
    } else if (useTimestamp) {
        if (*startTimeRender == -1) {
            // begin rendering 2 vsyncs after first decode
            *startTimeRender = systemTime(SYSTEM_TIME_MONOTONIC)
                    + 33000000 - (presentationTimeUs * 1000);
        }
        presentationTimeUs =
                (presentationTimeUs * 1000) + *startTimeRender;
        err = filterState->mCodec->renderOutputBufferAndRelease(
                index, presentationTimeUs);
    } else {
        err = filterState->mCodec->renderOutputBufferAndRelease(index);
    }

    if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
        outputFramesSinceFlush = 0;

        // releasing the buffer dequeued before flush should cause an error
        // if so, the frame will also be skipped in output stream
        if (err == OK) {
            ALOGE("FAIL: release after flush returned OK");
        } else if (err != -EACCES) {
            ALOGE("releaseOutputBuffer after flush returned %d, "
                    "expected -EACCES (-13)", err);
        }
    } else {
        CHECK(err == OK);
    }

    if (flags & MediaCodec::BUFFER_FLAG_EOS) {
        ALOGV("reached EOS on output.");
        filterState->mSawOutputEOS = true;
    }
}

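// Decodes the first video track of |path|, feeds the decoded frames through
// the selected filter component, and drains the filter's output until both
// codecs reach EOS. Returns 0 on success, 1 if the extractor cannot be set up.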
static int decode(
        const sp<ALooper> &looper,
        const char *path,
        const sp<Surface> &surface,
        bool renderSurface,
        bool useTimestamp,
        FilterType filterType) {

    static int64_t kTimeout = 500ll;

    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
    if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
        fprintf(stderr, "unable to instantiate extractor.\n");
        return 1;
    }

    KeyedVector<size_t, CodecState> stateByTrack;

    CodecState *vidState = NULL;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<AMessage> format;
        status_t err = extractor->getTrackFormat(i, &format);
        CHECK(err == OK);

        AString mime;
        CHECK(format->findString("mime", &mime));
        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
        if (!isVideo) {
            continue;
        }

        ALOGV("selecting track %zu", i);

        err = extractor->selectTrack(i);
        CHECK(err == OK);

        CodecState *state =
            &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));

        vidState = state;

        state->mNumBuffersDecoded = 0;

        state->mCodec = MediaCodec::CreateByType(
                looper, mime.c_str(), false /* encoder */);

        CHECK(state->mCodec != NULL);

        err = state->mCodec->configure(
                format, NULL /* surface */, NULL /* crypto */, 0 /* flags */);

        CHECK(err == OK);

        state->mSignalledInputEOS = false;
        state->mSawOutputEOS = false;

        break;
    }
    CHECK(!stateByTrack.isEmpty());
    CHECK(vidState != NULL);
    sp<AMessage> vidFormat;
    vidState->mCodec->getOutputFormat(&vidFormat);

    // set filter to use ARGB8888
    vidFormat->setInt32("color-format", OMX_COLOR_Format32bitARGB8888);
    // set app cache directory path
    vidFormat->setString("cacheDir", "/system/bin");

    // create RenderScript context for RSFilters
    RSC::sp<RSC::RS> context = new RSC::RS();
    context->init("/system/bin");

    sp<RenderScriptWrapper::RSFilterCallback> rsFilter;

    // create renderscript wrapper for RSFilters
    sp<RenderScriptWrapper> rsWrapper = new RenderScriptWrapper;
    rsWrapper->mContext = context.get();

    CodecState *filterState = new CodecState();
    filterState->mNumBuffersDecoded = 0;

    sp<AMessage> params = new AMessage();

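    // instantiate the requested filter component and collect its parameters;
    // the RenderScript-based filters attach their callback via "rs-wrapper"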
    switch (filterType) {
        case FILTERTYPE_ZERO:
        {
            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.zerofilter");
            params->setInt32("invert", kInvert);
            break;
        }
        case FILTERTYPE_INTRINSIC_BLUR:
        {
            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.intrinsicblur");
            params->setFloat("blur-radius", kBlurRadius);
            break;
        }
        case FILTERTYPE_SATURATION:
        {
            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.saturation");
            params->setFloat("saturation", kSaturation);
            break;
        }
        case FILTERTYPE_RS_SATURATION:
        {
            SaturationRSFilter *satFilter = new SaturationRSFilter;
            satFilter->init(context);
            rsFilter = satFilter;
            rsWrapper->mCallback = rsFilter;
            vidFormat->setObject("rs-wrapper", rsWrapper);

            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.RenderScript");
            break;
        }
        case FILTERTYPE_RS_NIGHT_VISION:
        {
            NightVisionRSFilter *nightVisionFilter = new NightVisionRSFilter;
            nightVisionFilter->init(context);
            rsFilter = nightVisionFilter;
            rsWrapper->mCallback = rsFilter;
            vidFormat->setObject("rs-wrapper", rsWrapper);

            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.RenderScript");
            break;
        }
        case FILTERTYPE_RS_ARGB_TO_RGBA:
        {
            ARGBToRGBARSFilter *argbToRgbaFilter = new ARGBToRGBARSFilter;
            argbToRgbaFilter->init(context);
            rsFilter = argbToRgbaFilter;
            rsWrapper->mCallback = rsFilter;
            vidFormat->setObject("rs-wrapper", rsWrapper);

            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.RenderScript");
            break;
        }
        default:
        {
            LOG_ALWAYS_FATAL("unrecognized FilterType");
            break;
        }
    }
    CHECK(filterState->mCodec != NULL);

    status_t err = filterState->mCodec->configure(
            vidFormat /* format */, surface, NULL /* crypto */, 0 /* flags */);
    CHECK(err == OK);

    filterState->mSignalledInputEOS = false;
    filterState->mSawOutputEOS = false;

    int64_t startTimeUs = ALooper::GetNowUs();
    int64_t startTimeRender = -1;

    for (size_t i = 0; i < stateByTrack.size(); ++i) {
        CodecState *state = &stateByTrack.editValueAt(i);

        sp<MediaCodec> codec = state->mCodec;

        CHECK_EQ((status_t)OK, codec->start());

        CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
        CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));

        ALOGV("got %zu input and %zu output buffers",
                state->mInBuffers.size(), state->mOutBuffers.size());
    }

    CHECK_EQ((status_t)OK, filterState->mCodec->setParameters(params));

    if (kTestFlush) {
        status_t flushErr = filterState->mCodec->flush();
        if (flushErr == OK) {
            ALOGE("FAIL: Flush before start returned OK");
        } else {
            ALOGV("Flush before start returned status %d, usually ENOSYS (-38)",
                    flushErr);
        }
    }

    CHECK_EQ((status_t)OK, filterState->mCodec->start());
    CHECK_EQ((status_t)OK, filterState->mCodec->getInputBuffers(
            &filterState->mInBuffers));
    CHECK_EQ((status_t)OK, filterState->mCodec->getOutputBuffers(
            &filterState->mOutBuffers));

    if (kTestFlush) {
        status_t flushErr = filterState->mCodec->flush();
        if (flushErr != OK) {
            ALOGE("FAIL: Flush after start returned %d, expect OK (0)",
                    flushErr);
        } else {
            ALOGV("Flush immediately after start OK");
        }
    }

    List<DecodedFrame> decodedFrameIndices;

    // loop until decoder reaches EOS
    bool sawInputEOS = false;
    bool sawOutputEOSOnAllTracks = false;
    while (!sawOutputEOSOnAllTracks) {
        if (!sawInputEOS) {
            size_t trackIndex;
            status_t err = extractor->getSampleTrackIndex(&trackIndex);

            if (err != OK) {
                ALOGV("saw input eos");
                sawInputEOS = true;
            } else {
                CodecState *state = &stateByTrack.editValueFor(trackIndex);

                size_t index;
                err = state->mCodec->dequeueInputBuffer(&index, kTimeout);

                if (err == OK) {
                    ALOGV("filling input buffer %zu", index);

                    const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);

                    err = extractor->readSampleData(buffer);
                    CHECK(err == OK);

                    int64_t timeUs;
                    err = extractor->getSampleTime(&timeUs);
                    CHECK(err == OK);

                    uint32_t bufferFlags = 0;

                    err = state->mCodec->queueInputBuffer(
                            index, 0 /* offset */, buffer->size(),
                            timeUs, bufferFlags);

                    CHECK(err == OK);

                    extractor->advance();
                } else {
                    CHECK_EQ(err, -EAGAIN);
                }
            }
        } else {
            for (size_t i = 0; i < stateByTrack.size(); ++i) {
                CodecState *state = &stateByTrack.editValueAt(i);

                if (!state->mSignalledInputEOS) {
                    size_t index;
                    status_t err =
                        state->mCodec->dequeueInputBuffer(&index, kTimeout);

                    if (err == OK) {
                        ALOGV("signalling input EOS on track %zu", i);

                        err = state->mCodec->queueInputBuffer(
                                index, 0 /* offset */, 0 /* size */,
                                0ll /* timeUs */, MediaCodec::BUFFER_FLAG_EOS);

                        CHECK(err == OK);

                        state->mSignalledInputEOS = true;
                    } else {
                        CHECK_EQ(err, -EAGAIN);
                    }
                }
            }
        }

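        // drain decoded frames from the decoder, queue them for the filter,
        // then pump the filter's input and output sides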
        sawOutputEOSOnAllTracks = true;
        for (size_t i = 0; i < stateByTrack.size(); ++i) {
            CodecState *state = &stateByTrack.editValueAt(i);

            if (state->mSawOutputEOS) {
                continue;
            } else {
                sawOutputEOSOnAllTracks = false;
            }

            DecodedFrame frame;
            status_t err = state->mCodec->dequeueOutputBuffer(
                    &frame.index, &frame.offset, &frame.size,
                    &frame.presentationTimeUs, &frame.flags, kTimeout);

            if (err == OK) {
                ALOGV("draining decoded buffer %zu, time = %lld us",
                        frame.index, (long long)frame.presentationTimeUs);

                ++(state->mNumBuffersDecoded);

                decodedFrameIndices.push_back(frame);

                if (frame.flags & MediaCodec::BUFFER_FLAG_EOS) {
                    ALOGV("reached EOS on decoder output.");
                    state->mSawOutputEOS = true;
                }

            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                ALOGV("INFO_OUTPUT_BUFFERS_CHANGED");
                CHECK_EQ((status_t)OK, state->mCodec->getOutputBuffers(
                        &state->mOutBuffers));

                ALOGV("got %zu output buffers", state->mOutBuffers.size());
            } else if (err == INFO_FORMAT_CHANGED) {
                sp<AMessage> format;
                CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));

                ALOGV("INFO_FORMAT_CHANGED: %s",
                        format->debugString().c_str());
            } else {
                CHECK_EQ(err, -EAGAIN);
            }

            tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);

            tryDrainOutputBuffer(
                    filterState, surface, renderSurface,
                    useTimestamp, &startTimeRender);
        }
    }

    // after EOS on decoder, let filter reach EOS
    while (!filterState->mSawOutputEOS) {
        tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);

        tryDrainOutputBuffer(
                filterState, surface, renderSurface,
                useTimestamp, &startTimeRender);
    }

    int64_t elapsedTimeUs = ALooper::GetNowUs() - startTimeUs;

    for (size_t i = 0; i < stateByTrack.size(); ++i) {
        CodecState *state = &stateByTrack.editValueAt(i);

        CHECK_EQ((status_t)OK, state->mCodec->release());

        printf("track %zu: %" PRId64 " frames decoded and filtered, "
                "%.2f fps.\n", i, state->mNumBuffersDecoded,
                state->mNumBuffersDecoded * 1E6 / elapsedTimeUs);
    }

    return 0;
}

}  // namespace android

int main(int argc, char **argv) {
    using namespace android;

    const char *me = argv[0];

    bool useSurface = false;
    bool renderSurface = false;
    bool useTimestamp = false;
    FilterType filterType = FILTERTYPE_ZERO;

    int res;
    while ((res = getopt(argc, argv, "bcnrszTRSh")) >= 0) {
        switch (res) {
            case 'b':
            {
                filterType = FILTERTYPE_INTRINSIC_BLUR;
                break;
            }
            case 'c':
            {
                filterType = FILTERTYPE_RS_ARGB_TO_RGBA;
                break;
            }
            case 'n':
            {
                filterType = FILTERTYPE_RS_NIGHT_VISION;
                break;
            }
            case 'r':
            {
                filterType = FILTERTYPE_RS_SATURATION;
                break;
            }
            case 's':
            {
                filterType = FILTERTYPE_SATURATION;
                break;
            }
            case 'z':
            {
                filterType = FILTERTYPE_ZERO;
                break;
            }
            case 'T':
            {
                useTimestamp = true;
            }
            // fall through
            case 'R':
            {
                renderSurface = true;
            }
            // fall through
            case 'S':
            {
                useSurface = true;
                break;
            }
            case '?':
            case 'h':
            default:
            {
                usage(me);
                break;
            }
        }
    }

    argc -= optind;
    argv += optind;

    if (argc != 1) {
        usage(me);
    }

    ProcessState::self()->startThreadPool();

    DataSource::RegisterDefaultSniffers();

    android::sp<ALooper> looper = new ALooper;
    looper->start();

    android::sp<SurfaceComposerClient> composerClient;
    android::sp<SurfaceControl> control;
    android::sp<Surface> surface;

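    // when buffers come from a surface (-S, implied by -R and -T), create a
    // full-screen RGBA surface via SurfaceFlinger on top of all other layers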
    if (useSurface) {
        composerClient = new SurfaceComposerClient;
        CHECK_EQ((status_t)OK, composerClient->initCheck());

        android::sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
                ISurfaceComposer::eDisplayIdMain));
        DisplayInfo info;
        SurfaceComposerClient::getDisplayInfo(display, &info);
        ssize_t displayWidth = info.w;
        ssize_t displayHeight = info.h;

        ALOGV("display is %zd x %zd", displayWidth, displayHeight);

        control = composerClient->createSurface(
                String8("A Surface"), displayWidth, displayHeight,
                PIXEL_FORMAT_RGBA_8888, 0);

        CHECK(control != NULL);
        CHECK(control->isValid());

        SurfaceComposerClient::openGlobalTransaction();
        CHECK_EQ((status_t)OK, control->setLayer(INT_MAX));
        CHECK_EQ((status_t)OK, control->show());
        SurfaceComposerClient::closeGlobalTransaction();

        surface = control->getSurface();
        CHECK(surface != NULL);
    }

    decode(looper, argv[0], surface, renderSurface, useTimestamp, filterType);

    if (useSurface) {
        composerClient->dispose();
    }

    looper->stop();

    return 0;
}