NuPlayer.cpp revision 2c2814b900a61fa07ddfff860b143fbbe9c740e9
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayer"
#include <utils/Log.h>

#include "NuPlayer.h"
#include "NuPlayerDecoder.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerStreamListener.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <surfaceflinger/Surface.h>

namespace android {

////////////////////////////////////////////////////////////////////////////////

NuPlayer::NuPlayer()
    : mEOS(false),
      mAudioEOS(false),
      mVideoEOS(false),
      mFlushingAudio(NONE),
      mFlushingVideo(NONE) {
}

NuPlayer::~NuPlayer() {
}

void NuPlayer::setListener(const wp<MediaPlayerBase> &listener) {
    mListener = listener;
}

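// The IStreamSource is handed to the looper thread as a raw pointer (see the
// "XXX unsafe" note below): the incStrong() here keeps it alive until the
// matching decStrong() in the kWhatSetDataSource handler, where it is wrapped
// in an sp<> again.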
void NuPlayer::setDataSource(const sp<IStreamSource> &source) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    source->incStrong(this);
    msg->setPointer("source", source.get());  // XXX unsafe.

    msg->post();
}

void NuPlayer::setVideoSurface(const sp<Surface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, id());
    msg->setObject("surface", surface);
    msg->post();
}

void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) {
    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id());
    msg->setObject("sink", sink);
    msg->post();
}

void NuPlayer::start() {
    (new AMessage(kWhatStart, id()))->post();
}

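// All player state lives on the looper thread: the public entry points above
// only post messages, and every state change happens in this handler.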
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            LOGI("kWhatSetDataSource");

            CHECK(mSource == NULL);

            void *ptr;
            CHECK(msg->findPointer("source", &ptr));

            mSource = static_cast<IStreamSource *>(ptr);
            mSource->decStrong(this);

            mStreamListener = new NuPlayerStreamListener(mSource, id());
            mTSParser = new ATSParser;
            break;
        }

        case kWhatSetVideoSurface:
        {
            LOGI("kWhatSetVideoSurface");

            sp<RefBase> obj;
            CHECK(msg->findObject("surface", &obj));

            mSurface = static_cast<Surface *>(obj.get());
            break;
        }

        case kWhatSetAudioSink:
        {
            LOGI("kWhatSetAudioSink");

            sp<RefBase> obj;
            CHECK(msg->findObject("sink", &obj));

            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
            break;
        }

        case kWhatStart:
        {
            mStreamListener->start();

            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()));

            looper()->registerHandler(mRenderer);

            (new AMessage(kWhatScanSources, id()))->post();
            break;
        }

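        // The ATSParser may not have seen both elementary streams yet, so
        // decoder instantiation is retried every 100 ms until both decoders
        // exist or the input stream ends.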
        case kWhatScanSources:
        {
            instantiateDecoder(false, &mVideoDecoder);

            if (mAudioSink != NULL) {
                instantiateDecoder(true, &mAudioDecoder);
            }

            if (mEOS) {
                break;
            }

            feedMoreTSData();

            if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
                msg->post(100000ll);
            }
            break;
        }

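        // Notifications from the audio/video decoders: input buffer requests,
        // drained output, EOS, output format changes and flush completion
        // (the latter drives the mFlushingAudio/mFlushingVideo state machine).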
        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            bool audio = msg->what() == kWhatAudioNotify;

            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));

            int32_t what;
            CHECK(codecRequest->findInt32("what", &what));

            if (what == ACodec::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, codecRequest);

                if (err == -EWOULDBLOCK && !mEOS) {
                    feedMoreTSData();
                    msg->post();
                }
            } else if (what == ACodec::kWhatEOS) {
                mRenderer->queueEOS(audio, ERROR_END_OF_STREAM);
            } else if (what == ACodec::kWhatFlushCompleted) {
                if (audio) {
                    CHECK_EQ((int)mFlushingAudio, (int)FLUSHING_DECODER);
                    mFlushingAudio = FLUSHED;
                } else {
                    CHECK_EQ((int)mFlushingVideo, (int)FLUSHING_DECODER);
                    mFlushingVideo = FLUSHED;
                }

                LOGI("decoder %s flush completed", audio ? "audio" : "video");

                if (mFlushingAudio == FLUSHED && mFlushingVideo == FLUSHED) {
                    LOGI("both audio and video are flushed now.");

                    mRenderer->signalTimeDiscontinuity();

                    if (mAudioDecoder != NULL) {
                        mAudioDecoder->signalResume();
                    }

                    if (mVideoDecoder != NULL) {
                        mVideoDecoder->signalResume();
                    }

                    mFlushingAudio = NONE;
                    mFlushingVideo = NONE;
                }
            } else if (what == ACodec::kWhatOutputFormatChanged) {
                CHECK(audio);

                int32_t numChannels;
                CHECK(codecRequest->findInt32("channel-count", &numChannels));

                int32_t sampleRate;
                CHECK(codecRequest->findInt32("sample-rate", &sampleRate));

                LOGI("Audio output format changed to %d Hz, %d channels",
                     sampleRate, numChannels);

                mAudioSink->close();
                CHECK_EQ(mAudioSink->open(sampleRate, numChannels), (status_t)OK);
                mAudioSink->start();
            } else {
                CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer);

                renderBuffer(audio, codecRequest);
            }

            break;
        }

        case kWhatRendererNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == Renderer::kWhatEOS) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                if (audio) {
                    mAudioEOS = true;
                } else {
                    mVideoEOS = true;
                }

                LOGI("reached %s EOS", audio ? "audio" : "video");

                if ((mAudioEOS || mAudioDecoder == NULL)
                        && (mVideoEOS || mVideoDecoder == NULL)) {
                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                }
            } else {
                CHECK_EQ(what, (int32_t)Renderer::kWhatFlushComplete);

                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                LOGI("renderer %s flush completed.", audio ? "audio" : "video");
            }
            break;
        }

        case kWhatMoreDataQueued:
        {
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

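// Pulls up to ten 188-byte transport stream packets per call from the stream
// listener and feeds them to the ATSParser, translating end-of-stream and
// discontinuity conditions into the corresponding parser signals.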
void NuPlayer::feedMoreTSData() {
    CHECK(!mEOS);

    for (int32_t i = 0; i < 10; ++i) {
        char buffer[188];
        ssize_t n = mStreamListener->read(buffer, sizeof(buffer));

        if (n == 0) {
            LOGI("input data EOS reached.");
            mTSParser->signalEOS(ERROR_END_OF_STREAM);
            mEOS = true;
            break;
        } else if (n == INFO_DISCONTINUITY) {
            mTSParser->signalDiscontinuity(ATSParser::DISCONTINUITY_SEEK);
        } else if (n < 0) {
            CHECK_EQ(n, -EWOULDBLOCK);
            break;
        } else {
            if (buffer[0] == 0x00) {
                // XXX legacy
                mTSParser->signalDiscontinuity(ATSParser::DISCONTINUITY_SEEK);
            } else {
                mTSParser->feedTSPacket(buffer, sizeof(buffer));
            }
        }
    }
}

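// Dequeues whichever of the pending audio/video access units has the earlier
// timestamp. Returns -EWOULDBLOCK if either stream cannot report a buffer
// time yet.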
status_t NuPlayer::dequeueNextAccessUnit(
        ATSParser::SourceType *type, sp<ABuffer> *accessUnit) {
    accessUnit->clear();

    status_t audioErr = -EWOULDBLOCK;
    int64_t audioTimeUs;

    sp<AnotherPacketSource> audioSource =
        static_cast<AnotherPacketSource *>(
                mTSParser->getSource(ATSParser::MPEG2ADTS_AUDIO).get());

    if (audioSource != NULL) {
        audioErr = audioSource->nextBufferTime(&audioTimeUs);
    }

    status_t videoErr = -EWOULDBLOCK;
    int64_t videoTimeUs;

    sp<AnotherPacketSource> videoSource =
        static_cast<AnotherPacketSource *>(
                mTSParser->getSource(ATSParser::AVC_VIDEO).get());

    if (videoSource != NULL) {
        videoErr = videoSource->nextBufferTime(&videoTimeUs);
    }

    if (audioErr == -EWOULDBLOCK || videoErr == -EWOULDBLOCK) {
        return -EWOULDBLOCK;
    }

    if (audioErr != OK && videoErr != OK) {
        return audioErr;
    }

    if (videoErr != OK || (audioErr == OK && audioTimeUs < videoTimeUs)) {
        *type = ATSParser::MPEG2ADTS_AUDIO;
        return audioSource->dequeueAccessUnit(accessUnit);
    } else {
        *type = ATSParser::AVC_VIDEO;
        return videoSource->dequeueAccessUnit(accessUnit);
    }
}

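// Dequeues the next access unit of the given type; returns -EWOULDBLOCK if
// the source is not available yet or has nothing buffered and no final error
// to report.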
status_t NuPlayer::dequeueAccessUnit(
        ATSParser::SourceType type, sp<ABuffer> *accessUnit) {
    sp<AnotherPacketSource> source =
        static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());

    if (source == NULL) {
        return -EWOULDBLOCK;
    }

    status_t finalResult;
    if (!source->hasBufferAvailable(&finalResult)) {
        return finalResult == OK ? -EWOULDBLOCK : finalResult;
    }

    return source->dequeueAccessUnit(accessUnit);
}

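// Creates and configures the audio or video decoder once the corresponding
// elementary stream shows up in the transport stream; -EWOULDBLOCK means
// "not yet, try again later".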
status_t NuPlayer::instantiateDecoder(
        bool audio, sp<Decoder> *decoder) {
    if (*decoder != NULL) {
        return OK;
    }

    ATSParser::SourceType type =
        audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO;

    sp<AnotherPacketSource> source =
        static_cast<AnotherPacketSource *>(
                mTSParser->getSource(type).get());

    if (source == NULL) {
        return -EWOULDBLOCK;
    }

    sp<AMessage> notify =
        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
                     id());

    *decoder = new Decoder(notify, audio ? NULL : mSurface);
    looper()->registerHandler(*decoder);

    const sp<MetaData> &meta = source->getFormat();
    (*decoder)->configure(meta);

    return OK;
}

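// Answers a decoder's "fill this buffer" request with the next access unit or
// with an error. An INFO_DISCONTINUITY result triggers a flush of the decoder
// and renderer and advances the flushing state machine.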
status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if ((audio && mFlushingAudio == FLUSHING_DECODER)
            || (!audio && mFlushingVideo == FLUSHING_DECODER)) {
        reply->setInt32("err", INFO_DISCONTINUITY);
        reply->post();
        return OK;
    }

    sp<ABuffer> accessUnit;
    status_t err = dequeueAccessUnit(
            audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO,
            &accessUnit);

    if (err == -EWOULDBLOCK) {
        return err;
    } else if (err != OK) {
        if (err == INFO_DISCONTINUITY) {
            LOGI("%s discontinuity", audio ? "audio" : "video");
            (audio ? mAudioDecoder : mVideoDecoder)->signalFlush();
            mRenderer->flush(audio);

            if (audio) {
                CHECK(mFlushingAudio == NONE
                        || mFlushingAudio == AWAITING_DISCONTINUITY);
                mFlushingAudio = FLUSHING_DECODER;
                if (mFlushingVideo == NONE) {
                    mFlushingVideo = (mVideoDecoder != NULL)
                        ? AWAITING_DISCONTINUITY
                        : FLUSHED;
                }
            } else {
                CHECK(mFlushingVideo == NONE
                        || mFlushingVideo == AWAITING_DISCONTINUITY);
                mFlushingVideo = FLUSHING_DECODER;
                if (mFlushingAudio == NONE) {
                    mFlushingAudio = (mAudioDecoder != NULL)
                        ? AWAITING_DISCONTINUITY
                        : FLUSHED;
                }
            }
        }

        reply->setInt32("err", err);
        reply->post();
        return OK;
    }

    LOGV("returned a valid buffer of %s data", audio ? "audio" : "video");

#if 0
    int64_t mediaTimeUs;
    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
    LOGI("feeding %s input buffer at media time %.2f secs",
         audio ? "audio" : "video",
         mediaTimeUs / 1E6);
#endif

    reply->setObject("buffer", accessUnit);
    reply->post();

    return OK;
}

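// Hands a decoded output buffer to the renderer together with the reply
// message the renderer should post back once it is done with the buffer.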
void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
    LOGV("renderBuffer %s", audio ? "audio" : "video");

    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));

    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());

    mRenderer->queueBuffer(audio, buffer, reply);
}

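// Forwards a media event to the MediaPlayerBase listener, if one is set and
// still alive (the listener is held through a weak reference).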
void NuPlayer::notifyListener(int msg, int ext1, int ext2) {
    if (mListener == NULL) {
        return;
    }

    sp<MediaPlayerBase> listener = mListener.promote();

    if (listener == NULL) {
        return;
    }

    listener->sendEvent(msg, ext1, ext2);
}

}  // namespace android