/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "codec"
#include <inttypes.h>
#include <utils/Log.h>

#include "SimplePlayer.h"

#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <media/ICrypto.h>
#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/NuMediaExtractor.h>
#include <gui/ISurfaceComposer.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/Surface.h>
#include <ui/DisplayInfo.h>

static void usage(const char *me) {
    fprintf(stderr, "usage: %s [-a] use audio\n"
                    "\t\t[-v] use video\n"
                    "\t\t[-p] playback\n"
                    "\t\t[-S] allocate buffers from a surface\n"
                    "\t\t[-R] render output to surface (enables -S)\n"
                    "\t\t[-T] use render timestamps (enables -R)\n",
                    me);
    exit(1);
}

namespace android {

struct CodecState {
    sp<MediaCodec> mCodec;
    Vector<sp<MediaCodecBuffer> > mInBuffers;
    Vector<sp<MediaCodecBuffer> > mOutBuffers;
    bool mSignalledInputEOS;
    bool mSawOutputEOS;
    int64_t mNumBuffersDecoded;
    int64_t mNumBytesDecoded;
    bool mIsAudio;
};

}  // namespace android

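// Decodes the selected audio and/or video tracks of the file at |path| with
// MediaCodec, optionally rendering video output to |surface|, and prints
// per-track throughput statistics when done.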
static int decode(
        const android::sp<android::ALooper> &looper,
        const char *path,
        bool useAudio,
        bool useVideo,
        const android::sp<android::Surface> &surface,
        bool renderSurface,
        bool useTimestamp) {
    using namespace android;

    static int64_t kTimeout = 500ll;

    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
    if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
        fprintf(stderr, "unable to instantiate extractor.\n");
        return 1;
    }

    KeyedVector<size_t, CodecState> stateByTrack;

    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<AMessage> format;
        status_t err = extractor->getTrackFormat(i, &format);
        CHECK_EQ(err, (status_t)OK);

        AString mime;
        CHECK(format->findString("mime", &mime));

        bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6);
        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);

        if (useAudio && !haveAudio && isAudio) {
            haveAudio = true;
        } else if (useVideo && !haveVideo && isVideo) {
            haveVideo = true;
        } else {
            continue;
        }

        ALOGV("selecting track %zu", i);

        err = extractor->selectTrack(i);
        CHECK_EQ(err, (status_t)OK);

        CodecState *state =
            &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));

        state->mNumBytesDecoded = 0;
        state->mNumBuffersDecoded = 0;
        state->mIsAudio = isAudio;

        state->mCodec = MediaCodec::CreateByType(
                looper, mime.c_str(), false /* encoder */);

        CHECK(state->mCodec != NULL);

        err = state->mCodec->configure(
                format, isVideo ? surface : NULL,
                NULL /* crypto */,
                0 /* flags */);

        CHECK_EQ(err, (status_t)OK);

        state->mSignalledInputEOS = false;
        state->mSawOutputEOS = false;
    }

    CHECK(!stateByTrack.isEmpty());

    int64_t startTimeUs = ALooper::GetNowUs();
    int64_t startTimeRender = -1;

    for (size_t i = 0; i < stateByTrack.size(); ++i) {
        CodecState *state = &stateByTrack.editValueAt(i);

        sp<MediaCodec> codec = state->mCodec;

        CHECK_EQ((status_t)OK, codec->start());

        CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
        CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));

        ALOGV("got %zu input and %zu output buffers",
              state->mInBuffers.size(), state->mOutBuffers.size());
    }

    bool sawInputEOS = false;

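    // Main decode loop: feed encoded samples from the extractor into the
    // codec for the matching track, queue an EOS buffer on every track once
    // the extractor runs dry, and drain output until all tracks report EOS.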
    for (;;) {
        if (!sawInputEOS) {
            size_t trackIndex;
            status_t err = extractor->getSampleTrackIndex(&trackIndex);

            if (err != OK) {
                ALOGV("saw input eos");
                sawInputEOS = true;
            } else {
                CodecState *state = &stateByTrack.editValueFor(trackIndex);

                size_t index;
                err = state->mCodec->dequeueInputBuffer(&index, kTimeout);

                if (err == OK) {
                    ALOGV("filling input buffer %zu", index);

                    const sp<MediaCodecBuffer> &buffer = state->mInBuffers.itemAt(index);
                    sp<ABuffer> abuffer = new ABuffer(buffer->base(), buffer->capacity());

                    err = extractor->readSampleData(abuffer);
                    CHECK_EQ(err, (status_t)OK);
                    buffer->setRange(abuffer->offset(), abuffer->size());

                    int64_t timeUs;
                    err = extractor->getSampleTime(&timeUs);
                    CHECK_EQ(err, (status_t)OK);

                    uint32_t bufferFlags = 0;

                    err = state->mCodec->queueInputBuffer(
                            index,
                            0 /* offset */,
                            buffer->size(),
                            timeUs,
                            bufferFlags);

                    CHECK_EQ(err, (status_t)OK);

                    extractor->advance();
                } else {
                    CHECK_EQ(err, -EAGAIN);
                }
            }
        } else {
            for (size_t i = 0; i < stateByTrack.size(); ++i) {
                CodecState *state = &stateByTrack.editValueAt(i);

                if (!state->mSignalledInputEOS) {
                    size_t index;
                    status_t err =
                        state->mCodec->dequeueInputBuffer(&index, kTimeout);

                    if (err == OK) {
                        ALOGV("signalling input EOS on track %zu", i);

                        err = state->mCodec->queueInputBuffer(
                                index,
                                0 /* offset */,
                                0 /* size */,
                                0ll /* timeUs */,
                                MediaCodec::BUFFER_FLAG_EOS);

                        CHECK_EQ(err, (status_t)OK);

                        state->mSignalledInputEOS = true;
                    } else {
                        CHECK_EQ(err, -EAGAIN);
                    }
                }
            }
        }

        bool sawOutputEOSOnAllTracks = true;
        for (size_t i = 0; i < stateByTrack.size(); ++i) {
            CodecState *state = &stateByTrack.editValueAt(i);
            if (!state->mSawOutputEOS) {
                sawOutputEOSOnAllTracks = false;
                break;
            }
        }

        if (sawOutputEOSOnAllTracks) {
            break;
        }

        for (size_t i = 0; i < stateByTrack.size(); ++i) {
            CodecState *state = &stateByTrack.editValueAt(i);

            if (state->mSawOutputEOS) {
                continue;
            }

            size_t index;
            size_t offset;
            size_t size;
            int64_t presentationTimeUs;
            uint32_t flags;
            status_t err = state->mCodec->dequeueOutputBuffer(
                    &index, &offset, &size, &presentationTimeUs, &flags,
                    kTimeout);

            if (err == OK) {
                ALOGV("draining output buffer %zu, time = %lld us",
                      index, (long long)presentationTimeUs);

                ++state->mNumBuffersDecoded;
                state->mNumBytesDecoded += size;

                if (surface == NULL || !renderSurface) {
                    err = state->mCodec->releaseOutputBuffer(index);
                } else if (useTimestamp) {
                    if (startTimeRender == -1) {
                        // begin rendering 2 vsyncs (~33ms) after first decode
                        startTimeRender =
                                systemTime(SYSTEM_TIME_MONOTONIC) + 33000000
                                - (presentationTimeUs * 1000);
                    }
                    presentationTimeUs =
                            (presentationTimeUs * 1000) + startTimeRender;
                    err = state->mCodec->renderOutputBufferAndRelease(
                            index, presentationTimeUs);
                } else {
                    err = state->mCodec->renderOutputBufferAndRelease(index);
                }

                CHECK_EQ(err, (status_t)OK);

                if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                    ALOGV("reached EOS on output.");

                    state->mSawOutputEOS = true;
                }
            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                ALOGV("INFO_OUTPUT_BUFFERS_CHANGED");
                CHECK_EQ((status_t)OK,
                         state->mCodec->getOutputBuffers(&state->mOutBuffers));

                ALOGV("got %zu output buffers", state->mOutBuffers.size());
            } else if (err == INFO_FORMAT_CHANGED) {
                sp<AMessage> format;
                CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));

                ALOGV("INFO_FORMAT_CHANGED: %s", format->debugString().c_str());
            } else {
                CHECK_EQ(err, -EAGAIN);
            }
        }
    }

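    // All tracks have reached output EOS: release the codecs and report
    // per-track decode throughput.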
    int64_t elapsedTimeUs = ALooper::GetNowUs() - startTimeUs;

    for (size_t i = 0; i < stateByTrack.size(); ++i) {
        CodecState *state = &stateByTrack.editValueAt(i);

        CHECK_EQ((status_t)OK, state->mCodec->release());

        if (state->mIsAudio) {
            printf("track %zu: %lld bytes received. %.2f KB/sec\n",
                   i,
                   (long long)state->mNumBytesDecoded,
                   state->mNumBytesDecoded * 1E6 / 1024 / elapsedTimeUs);
        } else {
            printf("track %zu: %lld frames decoded, %.2f fps. %lld"
                    " bytes received. %.2f KB/sec\n",
                   i,
                   (long long)state->mNumBuffersDecoded,
                   state->mNumBuffersDecoded * 1E6 / elapsedTimeUs,
                   (long long)state->mNumBytesDecoded,
                   state->mNumBytesDecoded * 1E6 / 1024 / elapsedTimeUs);
        }
    }

    return 0;
}

int main(int argc, char **argv) {
    using namespace android;

    const char *me = argv[0];

    bool useAudio = false;
    bool useVideo = false;
    bool playback = false;
    bool useSurface = false;
    bool renderSurface = false;
    bool useTimestamp = false;

    int res;
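    // Note the intentional fall-throughs below: -T also enables -R, and -R
    // also enables -S, matching the usage text above.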
    while ((res = getopt(argc, argv, "havpSDRT")) >= 0) {
        switch (res) {
            case 'a':
            {
                useAudio = true;
                break;
            }
            case 'v':
            {
                useVideo = true;
                break;
            }
            case 'p':
            {
                playback = true;
                break;
            }
            case 'T':
            {
                useTimestamp = true;
            }
            // fall through
            case 'R':
            {
                renderSurface = true;
            }
            // fall through
            case 'S':
            {
                useSurface = true;
                break;
            }
            case '?':
            case 'h':
            default:
            {
                usage(me);
            }
        }
    }

    argc -= optind;
    argv += optind;

    if (argc != 1) {
        usage(me);
    }

    if (!useAudio && !useVideo) {
        useAudio = useVideo = true;
    }

    ProcessState::self()->startThreadPool();

    sp<ALooper> looper = new ALooper;
    looper->start();

    sp<SurfaceComposerClient> composerClient;
    sp<SurfaceControl> control;
    sp<Surface> surface;

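    // For playback, or when decoding video into a surface, create a
    // full-screen surface on the main display to receive the frames.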
    if (playback || (useSurface && useVideo)) {
        composerClient = new SurfaceComposerClient;
        CHECK_EQ(composerClient->initCheck(), (status_t)OK);

        sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
                ISurfaceComposer::eDisplayIdMain));
        DisplayInfo info;
        SurfaceComposerClient::getDisplayInfo(display, &info);
        ssize_t displayWidth = info.w;
        ssize_t displayHeight = info.h;

        ALOGV("display is %zd x %zd\n", displayWidth, displayHeight);

        control = composerClient->createSurface(
                String8("A Surface"),
                displayWidth,
                displayHeight,
                PIXEL_FORMAT_RGB_565,
                0);

        CHECK(control != NULL);
        CHECK(control->isValid());

        SurfaceComposerClient::openGlobalTransaction();
        CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
        CHECK_EQ(control->show(), (status_t)OK);
        SurfaceComposerClient::closeGlobalTransaction();

        surface = control->getSurface();
        CHECK(surface != NULL);
    }

    if (playback) {
        sp<SimplePlayer> player = new SimplePlayer;
        looper->registerHandler(player);

        player->setDataSource(argv[0]);
        player->setSurface(surface->getIGraphicBufferProducer());
        player->start();
        sleep(60);
        player->stop();
        player->reset();
    } else {
        decode(looper, argv[0], useAudio, useVideo, surface, renderSurface,
                useTimestamp);
    }

    if (playback || (useSurface && useVideo)) {
        composerClient->dispose();
    }

    looper->stop();

    return 0;
}