/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "codec"
#include <inttypes.h>
#include <utils/Log.h>

#include "SimplePlayer.h"

#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <media/ICrypto.h>
#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/NuMediaExtractor.h>
#include <gui/ISurfaceComposer.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/Surface.h>
#include <ui/DisplayInfo.h>

static void usage(const char *me) {
    fprintf(stderr, "usage: %s [-a] use audio\n"
                    "\t\t[-v] use video\n"
                    "\t\t[-p] playback\n"
                    "\t\t[-S] allocate buffers from a surface\n"
                    "\t\t[-R] render output to surface (enables -S)\n"
                    "\t\t[-T] use render timestamps (enables -R)\n",
                    me);
    exit(1);
}

namespace android {

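// Per-track decoder state: the MediaCodec instance, its cached input/output
// buffers, EOS bookkeeping and simple decode statistics.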
struct CodecState {
    sp<MediaCodec> mCodec;
    Vector<sp<ABuffer> > mInBuffers;
    Vector<sp<ABuffer> > mOutBuffers;
    bool mSignalledInputEOS;
    bool mSawOutputEOS;
    int64_t mNumBuffersDecoded;
    int64_t mNumBytesDecoded;
    bool mIsAudio;
};

}  // namespace android

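// Demuxes |path| with NuMediaExtractor, creates one MediaCodec decoder per
// selected track and drives a synchronous queue/dequeue loop until every
// track has reached output EOS.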
static int decode(
        const android::sp<android::ALooper> &looper,
        const char *path,
        bool useAudio,
        bool useVideo,
        const android::sp<android::Surface> &surface,
        bool renderSurface,
        bool useTimestamp) {
    using namespace android;

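    // Timeout, in microseconds, for the blocking dequeue calls below.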
    static int64_t kTimeout = 500ll;

    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
    if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
        fprintf(stderr, "unable to instantiate extractor.\n");
        return 1;
    }

    KeyedVector<size_t, CodecState> stateByTrack;

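    // Walk the container's tracks, selecting at most one audio and one video
    // track, and create and configure a decoder for each selected track.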
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<AMessage> format;
        status_t err = extractor->getTrackFormat(i, &format);
        CHECK_EQ(err, (status_t)OK);

        AString mime;
        CHECK(format->findString("mime", &mime));

        bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6);
        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);

        if (useAudio && !haveAudio && isAudio) {
            haveAudio = true;
        } else if (useVideo && !haveVideo && isVideo) {
            haveVideo = true;
        } else {
            continue;
        }

        ALOGV("selecting track %zu", i);

        err = extractor->selectTrack(i);
        CHECK_EQ(err, (status_t)OK);

        CodecState *state =
            &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));

        state->mNumBytesDecoded = 0;
        state->mNumBuffersDecoded = 0;
        state->mIsAudio = isAudio;

        state->mCodec = MediaCodec::CreateByType(
                looper, mime.c_str(), false /* encoder */);

        CHECK(state->mCodec != NULL);

        err = state->mCodec->configure(
                format, isVideo ? surface : NULL,
                NULL /* crypto */,
                0 /* flags */);

        CHECK_EQ(err, (status_t)OK);

        state->mSignalledInputEOS = false;
        state->mSawOutputEOS = false;
    }

    CHECK(!stateByTrack.isEmpty());

    int64_t startTimeUs = ALooper::GetNowUs();
    int64_t startTimeRender = -1;

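    // Start every codec and cache its input/output buffer arrays.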
    for (size_t i = 0; i < stateByTrack.size(); ++i) {
        CodecState *state = &stateByTrack.editValueAt(i);

        sp<MediaCodec> codec = state->mCodec;

        CHECK_EQ((status_t)OK, codec->start());

        CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
        CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));

        ALOGV("got %zu input and %zu output buffers",
              state->mInBuffers.size(), state->mOutBuffers.size());
    }

    bool sawInputEOS = false;

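    // Main decode loop: feed demuxed samples to the matching decoder, signal
    // input EOS once the extractor is exhausted, and drain output buffers
    // until every track has reported output EOS.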
    for (;;) {
        if (!sawInputEOS) {
            size_t trackIndex;
            status_t err = extractor->getSampleTrackIndex(&trackIndex);

            if (err != OK) {
                ALOGV("saw input eos");
                sawInputEOS = true;
            } else {
                CodecState *state = &stateByTrack.editValueFor(trackIndex);

                size_t index;
                err = state->mCodec->dequeueInputBuffer(&index, kTimeout);

                if (err == OK) {
                    ALOGV("filling input buffer %zu", index);

                    const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);

                    err = extractor->readSampleData(buffer);
                    CHECK_EQ(err, (status_t)OK);

                    int64_t timeUs;
                    err = extractor->getSampleTime(&timeUs);
                    CHECK_EQ(err, (status_t)OK);

                    uint32_t bufferFlags = 0;

                    err = state->mCodec->queueInputBuffer(
                            index,
                            0 /* offset */,
                            buffer->size(),
                            timeUs,
                            bufferFlags);

                    CHECK_EQ(err, (status_t)OK);

                    extractor->advance();
                } else {
                    CHECK_EQ(err, -EAGAIN);
                }
            }
        } else {
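            // The extractor is exhausted; queue an empty EOS-flagged buffer
            // on every codec that has not been signalled yet.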
            for (size_t i = 0; i < stateByTrack.size(); ++i) {
                CodecState *state = &stateByTrack.editValueAt(i);

                if (!state->mSignalledInputEOS) {
                    size_t index;
                    status_t err =
                        state->mCodec->dequeueInputBuffer(&index, kTimeout);

                    if (err == OK) {
                        ALOGV("signalling input EOS on track %zu", i);

                        err = state->mCodec->queueInputBuffer(
                                index,
                                0 /* offset */,
                                0 /* size */,
                                0ll /* timeUs */,
                                MediaCodec::BUFFER_FLAG_EOS);

                        CHECK_EQ(err, (status_t)OK);

                        state->mSignalledInputEOS = true;
                    } else {
                        CHECK_EQ(err, -EAGAIN);
                    }
                }
            }
        }

        bool sawOutputEOSOnAllTracks = true;
        for (size_t i = 0; i < stateByTrack.size(); ++i) {
            CodecState *state = &stateByTrack.editValueAt(i);
            if (!state->mSawOutputEOS) {
                sawOutputEOSOnAllTracks = false;
                break;
            }
        }

        if (sawOutputEOSOnAllTracks) {
            break;
        }

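        // Drain whatever output is available on every track that has not yet
        // reached output EOS.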
        for (size_t i = 0; i < stateByTrack.size(); ++i) {
            CodecState *state = &stateByTrack.editValueAt(i);

            if (state->mSawOutputEOS) {
                continue;
            }

            size_t index;
            size_t offset;
            size_t size;
            int64_t presentationTimeUs;
            uint32_t flags;
            status_t err = state->mCodec->dequeueOutputBuffer(
                    &index, &offset, &size, &presentationTimeUs, &flags,
                    kTimeout);

            if (err == OK) {
                ALOGV("draining output buffer %zu, time = %lld us",
                      index, (long long)presentationTimeUs);

                ++state->mNumBuffersDecoded;
                state->mNumBytesDecoded += size;

                if (surface == NULL || !renderSurface) {
                    err = state->mCodec->releaseOutputBuffer(index);
                } else if (useTimestamp) {
                    if (startTimeRender == -1) {
                        // begin rendering 2 vsyncs (~33ms) after first decode
                        startTimeRender =
                                systemTime(SYSTEM_TIME_MONOTONIC) + 33000000
                                - (presentationTimeUs * 1000);
                    }
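                    // renderOutputBufferAndRelease() takes a nanosecond
                    // timestamp on the monotonic clock, so convert the
                    // presentation time and apply the offset computed above.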
                    presentationTimeUs =
                            (presentationTimeUs * 1000) + startTimeRender;
                    err = state->mCodec->renderOutputBufferAndRelease(
                            index, presentationTimeUs);
                } else {
                    err = state->mCodec->renderOutputBufferAndRelease(index);
                }

                CHECK_EQ(err, (status_t)OK);

                if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                    ALOGV("reached EOS on output.");

                    state->mSawOutputEOS = true;
                }
            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                ALOGV("INFO_OUTPUT_BUFFERS_CHANGED");
                CHECK_EQ((status_t)OK,
                         state->mCodec->getOutputBuffers(&state->mOutBuffers));

                ALOGV("got %zu output buffers", state->mOutBuffers.size());
            } else if (err == INFO_FORMAT_CHANGED) {
                sp<AMessage> format;
                CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));

                ALOGV("INFO_FORMAT_CHANGED: %s", format->debugString().c_str());
            } else {
                CHECK_EQ(err, -EAGAIN);
            }
        }
    }

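    // All tracks reached output EOS; release the codecs and print per-track
    // throughput statistics.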
    int64_t elapsedTimeUs = ALooper::GetNowUs() - startTimeUs;

    for (size_t i = 0; i < stateByTrack.size(); ++i) {
        CodecState *state = &stateByTrack.editValueAt(i);

        CHECK_EQ((status_t)OK, state->mCodec->release());

        if (state->mIsAudio) {
            printf("track %zu: %lld bytes received. %.2f KB/sec\n",
                   i,
                   (long long)state->mNumBytesDecoded,
                   state->mNumBytesDecoded * 1E6 / 1024 / elapsedTimeUs);
        } else {
            printf("track %zu: %lld frames decoded, %.2f fps. %lld"
                    " bytes received. %.2f KB/sec\n",
                   i,
                   (long long)state->mNumBuffersDecoded,
                   state->mNumBuffersDecoded * 1E6 / elapsedTimeUs,
                   (long long)state->mNumBytesDecoded,
                   state->mNumBytesDecoded * 1E6 / 1024 / elapsedTimeUs);
        }
    }

    return 0;
}

int main(int argc, char **argv) {
    using namespace android;

    const char *me = argv[0];

    bool useAudio = false;
    bool useVideo = false;
    bool playback = false;
    bool useSurface = false;
    bool renderSurface = false;
    bool useTimestamp = false;

    int res;
    while ((res = getopt(argc, argv, "havpSDRT")) >= 0) {
        switch (res) {
            case 'a':
            {
                useAudio = true;
                break;
            }
            case 'v':
            {
                useVideo = true;
                break;
            }
            case 'p':
            {
                playback = true;
                break;
            }
            case 'T':
            {
                useTimestamp = true;
            }
            // fall through
            case 'R':
            {
                renderSurface = true;
            }
            // fall through
            case 'S':
            {
                useSurface = true;
                break;
            }
            case '?':
            case 'h':
            default:
            {
                usage(me);
            }
        }
    }

    argc -= optind;
    argv += optind;

    if (argc != 1) {
        usage(me);
    }

    if (!useAudio && !useVideo) {
        useAudio = useVideo = true;
    }

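    // Start the binder thread pool so the process can service callbacks from
    // the media and graphics services used below.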
    ProcessState::self()->startThreadPool();

    DataSource::RegisterDefaultSniffers();

    sp<ALooper> looper = new ALooper;
    looper->start();

    sp<SurfaceComposerClient> composerClient;
    sp<SurfaceControl> control;
    sp<Surface> surface;

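    // Both -p playback and video decode with -S/-R need an output surface:
    // create a full-screen, topmost SurfaceControl on the main display.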
    if (playback || (useSurface && useVideo)) {
        composerClient = new SurfaceComposerClient;
        CHECK_EQ(composerClient->initCheck(), (status_t)OK);

        sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
                ISurfaceComposer::eDisplayIdMain));
        DisplayInfo info;
        SurfaceComposerClient::getDisplayInfo(display, &info);
        ssize_t displayWidth = info.w;
        ssize_t displayHeight = info.h;

        ALOGV("display is %zd x %zd\n", displayWidth, displayHeight);

        control = composerClient->createSurface(
                String8("A Surface"),
                displayWidth,
                displayHeight,
                PIXEL_FORMAT_RGB_565,
                0);

        CHECK(control != NULL);
        CHECK(control->isValid());

        SurfaceComposerClient::openGlobalTransaction();
        CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
        CHECK_EQ(control->show(), (status_t)OK);
        SurfaceComposerClient::closeGlobalTransaction();

        surface = control->getSurface();
        CHECK(surface != NULL);
    }

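    // With -p, hand the file to SimplePlayer for roughly a minute of
    // playback; otherwise run the raw MediaCodec decode loop above.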
    if (playback) {
        sp<SimplePlayer> player = new SimplePlayer;
        looper->registerHandler(player);

        player->setDataSource(argv[0]);
        player->setSurface(surface->getIGraphicBufferProducer());
        player->start();
        sleep(60);
        player->stop();
        player->reset();
    } else {
        decode(looper, argv[0], useAudio, useVideo, surface, renderSurface,
                useTimestamp);
    }

    if (playback || (useSurface && useVideo)) {
        composerClient->dispose();
    }

    looper->stop();

    return 0;
}