1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "GenericSource"
19
20#include "GenericSource.h"
21
22#include "AnotherPacketSource.h"
23
24#include <media/IMediaHTTPService.h>
25#include <media/stagefright/foundation/ABuffer.h>
26#include <media/stagefright/foundation/ADebug.h>
27#include <media/stagefright/foundation/AMessage.h>
28#include <media/stagefright/DataSource.h>
29#include <media/stagefright/FileSource.h>
30#include <media/stagefright/MediaBuffer.h>
31#include <media/stagefright/MediaDefs.h>
32#include <media/stagefright/MediaExtractor.h>
33#include <media/stagefright/MediaSource.h>
34#include <media/stagefright/MetaData.h>
35#include <media/stagefright/Utils.h>
36#include "../../libstagefright/include/DRMExtractor.h"
37#include "../../libstagefright/include/NuCachedSource2.h"
38#include "../../libstagefright/include/WVMExtractor.h"
39#include "../../libstagefright/include/HTTPBase.h"
40
41namespace android {
42
// Buffering water marks for streaming playback. The time-based marks gate
// when buffering is considered too low / sufficient / sufficient-after-a-
// rebuffer; the byte-based marks are the fallback when only a byte count is
// known. NOTE(review): consumers (presumably BufferingMonitor) are outside
// this chunk, so the int64_t marks are left non-const.
static int64_t kLowWaterMarkUs = 2000000ll;  // 2secs
static int64_t kHighWaterMarkUs = 5000000ll;  // 5secs
static int64_t kHighWaterMarkRebufferUs = 15000000ll;  // 15secs
static const ssize_t kLowWaterMarkBytes = 40000;
static const ssize_t kHighWaterMarkBytes = 200000;
48
// Constructs the source bound to its notification target. All heavy setup
// (data source creation, extractor sniffing) is deferred to setDataSource()
// and prepareAsync(); here we only initialize state, create the buffering
// monitor, and register the default extractor sniffers.
NuPlayer::GenericSource::GenericSource(
        const sp<AMessage> &notify,
        bool uidValid,
        uid_t uid)
    : Source(notify),
      mAudioTimeUs(0),
      mAudioLastDequeueTimeUs(0),
      mVideoTimeUs(0),
      mVideoLastDequeueTimeUs(0),
      mFetchSubtitleDataGeneration(0),
      mFetchTimedTextDataGeneration(0),
      mDurationUs(-1ll),          // -1 == duration unknown
      mAudioIsVorbis(false),
      mIsWidevine(false),
      mIsSecure(false),
      mIsStreaming(false),
      mUIDValid(uidValid),
      mUID(uid),
      mFd(-1),                    // -1 == no fd owned
      mDrmManagerClient(NULL),
      mBitrate(-1ll),             // -1 == bitrate unknown
      mPendingReadBufferTypes(0) {
    mBufferingMonitor = new BufferingMonitor(notify);
    resetDataSource();
    DataSource::RegisterDefaultSniffers();
}
75
// Returns the object to its pristine "no data source" state: clears URI/http
// state, closes any dup'ed fd we own, stops DRM playback accounting before
// dropping the DRM handles, and tears down the buffering monitor's looper so
// no stale monitor messages can fire.
void NuPlayer::GenericSource::resetDataSource() {
    mHTTPService.clear();
    mHttpSource.clear();
    mUri.clear();
    mUriHeaders.clear();
    if (mFd >= 0) {
        // We own the descriptor (dup'ed in setDataSource).
        close(mFd);
        mFd = -1;
    }
    mOffset = 0;
    mLength = 0;
    // Must run while mDecryptHandle is still valid.
    setDrmPlaybackStatusIfNeeded(Playback::STOP, 0);
    mDecryptHandle = NULL;
    mDrmManagerClient = NULL;
    mStarted = false;
    mStopRead = true;

    if (mBufferingMonitorLooper != NULL) {
        mBufferingMonitorLooper->unregisterHandler(mBufferingMonitor->id());
        mBufferingMonitorLooper->stop();
        mBufferingMonitorLooper = NULL;
    }
    mBufferingMonitor->stop();
}
100
101status_t NuPlayer::GenericSource::setDataSource(
102        const sp<IMediaHTTPService> &httpService,
103        const char *url,
104        const KeyedVector<String8, String8> *headers) {
105    resetDataSource();
106
107    mHTTPService = httpService;
108    mUri = url;
109
110    if (headers) {
111        mUriHeaders = *headers;
112    }
113
114    // delay data source creation to prepareAsync() to avoid blocking
115    // the calling thread in setDataSource for any significant time.
116    return OK;
117}
118
119status_t NuPlayer::GenericSource::setDataSource(
120        int fd, int64_t offset, int64_t length) {
121    resetDataSource();
122
123    mFd = dup(fd);
124    mOffset = offset;
125    mLength = length;
126
127    // delay data source creation to prepareAsync() to avoid blocking
128    // the calling thread in setDataSource for any significant time.
129    return OK;
130}
131
132status_t NuPlayer::GenericSource::setDataSource(const sp<DataSource>& source) {
133    resetDataSource();
134    mDataSource = source;
135    return OK;
136}
137
// Returns the container-level metadata cached by initFromDataSource(),
// or NULL if the extractor has not been initialized yet.
sp<MetaData> NuPlayer::GenericSource::getFileFormatMeta() const {
    return mFileMeta;
}
141
// Sniffs mDataSource, creates the matching extractor (WVM for widevine
// streams, generic MediaExtractor otherwise), then walks the container's
// tracks picking the first audio and first video track, collecting the
// overall duration and aggregate bitrate along the way.
// Returns UNKNOWN_ERROR on any unusable source (failed sniff, no tracks,
// missing track metadata).
status_t NuPlayer::GenericSource::initFromDataSource() {
    sp<IMediaExtractor> extractor;
    String8 mimeType;
    float confidence;
    sp<AMessage> dummy;
    bool isWidevineStreaming = false;

    CHECK(mDataSource != NULL);

    if (mIsWidevine) {
        // widevine:// URL: the source must actually sniff as WVM.
        isWidevineStreaming = SniffWVM(
                mDataSource, &mimeType, &confidence, &dummy);
        if (!isWidevineStreaming ||
                strcasecmp(
                    mimeType.string(), MEDIA_MIMETYPE_CONTAINER_WVM)) {
            ALOGE("unsupported widevine mime: %s", mimeType.string());
            return UNKNOWN_ERROR;
        }
    } else if (mIsStreaming) {
        // Other streaming sources: sniff generically, but a WVM result
        // still routes us to the widevine extractor below.
        if (!mDataSource->sniff(&mimeType, &confidence, &dummy)) {
            return UNKNOWN_ERROR;
        }
        isWidevineStreaming = !strcasecmp(
                mimeType.string(), MEDIA_MIMETYPE_CONTAINER_WVM);
    }

    if (isWidevineStreaming) {
        // we don't want cached source for widevine streaming.
        mCachedSource.clear();
        mDataSource = mHttpSource;
        mWVMExtractor = new WVMExtractor(mDataSource);
        mWVMExtractor->setAdaptiveStreamingMode(true);
        if (mUIDValid) {
            mWVMExtractor->setUID(mUID);
        }
        extractor = mWVMExtractor;
    } else {
        extractor = MediaExtractor::Create(mDataSource,
                mimeType.isEmpty() ? NULL : mimeType.string());
    }

    if (extractor == NULL) {
        return UNKNOWN_ERROR;
    }

    if (extractor->getDrmFlag()) {
        checkDrmStatus(mDataSource);
    }

    mFileMeta = extractor->getMetaData();
    if (mFileMeta != NULL) {
        int64_t duration;
        if (mFileMeta->findInt64(kKeyDuration, &duration)) {
            mDurationUs = duration;
        }

        if (!mIsWidevine) {
            // Check mime to see if we actually have a widevine source.
            // If the data source is not URL-type (eg. file source), we
            // won't be able to tell until now.
            const char *fileMime;
            if (mFileMeta->findCString(kKeyMIMEType, &fileMime)
                    && !strncasecmp(fileMime, "video/wvm", 9)) {
                mIsWidevine = true;
            }
        }
    }

    int32_t totalBitrate = 0;

    size_t numtracks = extractor->countTracks();
    if (numtracks == 0) {
        return UNKNOWN_ERROR;
    }

    for (size_t i = 0; i < numtracks; ++i) {
        sp<IMediaSource> track = extractor->getTrack(i);
        if (track == NULL) {
            continue;
        }

        sp<MetaData> meta = extractor->getTrackMetaData(i);
        if (meta == NULL) {
            ALOGE("no metadata for track %zu", i);
            return UNKNOWN_ERROR;
        }

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        // Do the string compare immediately with "mime",
        // we can't assume "mime" would stay valid after another
        // extractor operation, some extractors might modify meta
        // during getTrack() and make it invalid.
        if (!strncasecmp(mime, "audio/", 6)) {
            // First audio track wins.
            if (mAudioTrack.mSource == NULL) {
                mAudioTrack.mIndex = i;
                mAudioTrack.mSource = track;
                mAudioTrack.mPackets =
                    new AnotherPacketSource(mAudioTrack.mSource->getFormat());

                if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                    mAudioIsVorbis = true;
                } else {
                    mAudioIsVorbis = false;
                }
            }
        } else if (!strncasecmp(mime, "video/", 6)) {
            // First video track wins.
            if (mVideoTrack.mSource == NULL) {
                mVideoTrack.mIndex = i;
                mVideoTrack.mSource = track;
                mVideoTrack.mPackets =
                    new AnotherPacketSource(mVideoTrack.mSource->getFormat());

                // check if the source requires secure buffers
                int32_t secure;
                if (meta->findInt32(kKeyRequiresSecureBuffers, &secure)
                        && secure) {
                    mIsSecure = true;
                    if (mUIDValid) {
                        extractor->setUID(mUID);
                    }
                }
            }
        }

        mSources.push(track);
        int64_t durationUs;
        if (meta->findInt64(kKeyDuration, &durationUs)) {
            // Overall duration is the longest track's duration.
            if (durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }

        int32_t bitrate;
        // Aggregate bitrate only while every track reports one; a single
        // missing value makes the total meaningless (-1 == unknown).
        if (totalBitrate >= 0 && meta->findInt32(kKeyBitRate, &bitrate)) {
            totalBitrate += bitrate;
        } else {
            totalBitrate = -1;
        }
    }

    if (mSources.size() == 0) {
        ALOGE("b/23705695");
        return UNKNOWN_ERROR;
    }

    mBitrate = totalBitrate;

    return OK;
}
293
294status_t NuPlayer::GenericSource::startSources() {
295    // Start the selected A/V tracks now before we start buffering.
296    // Widevine sources might re-initialize crypto when starting, if we delay
297    // this to start(), all data buffered during prepare would be wasted.
298    // (We don't actually start reading until start().)
299    if (mAudioTrack.mSource != NULL && mAudioTrack.mSource->start() != OK) {
300        ALOGE("failed to start audio track!");
301        return UNKNOWN_ERROR;
302    }
303
304    if (mVideoTrack.mSource != NULL && mVideoTrack.mSource->start() != OK) {
305        ALOGE("failed to start video track!");
306        return UNKNOWN_ERROR;
307    }
308
309    return OK;
310}
311
312void NuPlayer::GenericSource::checkDrmStatus(const sp<DataSource>& dataSource) {
313    dataSource->getDrmInfo(mDecryptHandle, &mDrmManagerClient);
314    if (mDecryptHandle != NULL) {
315        CHECK(mDrmManagerClient);
316        if (RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
317            sp<AMessage> msg = dupNotify();
318            msg->setInt32("what", kWhatDrmNoLicense);
319            msg->post();
320        }
321    }
322}
323
324int64_t NuPlayer::GenericSource::getLastReadPosition() {
325    if (mAudioTrack.mSource != NULL) {
326        return mAudioTimeUs;
327    } else if (mVideoTrack.mSource != NULL) {
328        return mVideoTimeUs;
329    } else {
330        return 0;
331    }
332}
333
334status_t NuPlayer::GenericSource::setBuffers(
335        bool audio, Vector<MediaBuffer *> &buffers) {
336    if (mIsSecure && !audio && mVideoTrack.mSource != NULL) {
337        return mVideoTrack.mSource->setBuffers(buffers);
338    }
339    return INVALID_OPERATION;
340}
341
// True when the source is a streaming one (widevine or cached/http),
// as decided in onPrepareAsync().
bool NuPlayer::GenericSource::isStreaming() const {
    return mIsStreaming;
}
345
// Forwards the audio-offload state to the buffering monitor.
void NuPlayer::GenericSource::setOffloadAudio(bool offload) {
    mBufferingMonitor->setOffloadAudio(offload);
}
349
// Unregisters and stops the worker looper first so no message can arrive
// on a half-destructed object, then releases all resources via
// resetDataSource().
NuPlayer::GenericSource::~GenericSource() {
    if (mLooper != NULL) {
        mLooper->unregisterHandler(id());
        mLooper->stop();
    }
    resetDataSource();
}
357
358void NuPlayer::GenericSource::prepareAsync() {
359    if (mLooper == NULL) {
360        mLooper = new ALooper;
361        mLooper->setName("generic");
362        mLooper->start();
363
364        mLooper->registerHandler(this);
365    }
366
367    sp<AMessage> msg = new AMessage(kWhatPrepareAsync, this);
368    msg->post();
369}
370
// Worker-thread half of prepareAsync(): performs the deferred data source
// creation, initializes the extractor, publishes video size and source
// flags, and either finishes preparation directly or — for secure sources —
// first asks the player to instantiate secure decoders.
void NuPlayer::GenericSource::onPrepareAsync() {
    // delayed data source creation
    if (mDataSource == NULL) {
        // set to false first, if the extractor
        // comes back as secure, set it to true then.
        mIsSecure = false;

        if (!mUri.empty()) {
            const char* uri = mUri.c_str();
            String8 contentType;
            mIsWidevine = !strncasecmp(uri, "widevine://", 11);

            if (!strncasecmp("http://", uri, 7)
                    || !strncasecmp("https://", uri, 8)
                    || mIsWidevine) {
                // Create the http source explicitly so disconnect() can
                // later abort a blocked connection (see disconnect()).
                mHttpSource = DataSource::CreateMediaHTTP(mHTTPService);
                if (mHttpSource == NULL) {
                    ALOGE("Failed to create http source!");
                    notifyPreparedAndCleanup(UNKNOWN_ERROR);
                    return;
                }
            }

            mDataSource = DataSource::CreateFromURI(
                   mHTTPService, uri, &mUriHeaders, &contentType,
                   static_cast<HTTPBase *>(mHttpSource.get()));
        } else {
            mIsWidevine = false;

            // fd-based source: FileSource takes over the dup'ed descriptor.
            mDataSource = new FileSource(mFd, mOffset, mLength);
            mFd = -1;
        }

        if (mDataSource == NULL) {
            ALOGE("Failed to create data source!");
            notifyPreparedAndCleanup(UNKNOWN_ERROR);
            return;
        }
    }

    if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
        mCachedSource = static_cast<NuCachedSource2 *>(mDataSource.get());
    }

    // For widevine or other cached streaming cases, we need to wait for
    // enough buffering before reporting prepared.
    // Note that even when URL doesn't start with widevine://, mIsWidevine
    // could still be set to true later, if the streaming or file source
    // is sniffed to be widevine. We don't want to buffer for file source
    // in that case, so must check the flag now.
    mIsStreaming = (mIsWidevine || mCachedSource != NULL);

    // init extractor from data source
    status_t err = initFromDataSource();

    if (err != OK) {
        ALOGE("Failed to init from data source!");
        notifyPreparedAndCleanup(err);
        return;
    }

    if (mVideoTrack.mSource != NULL) {
        // Publish the video dimensions before reporting prepared.
        sp<MetaData> meta = doGetFormatMeta(false /* audio */);
        sp<AMessage> msg = new AMessage;
        err = convertMetaDataToMessage(meta, &msg);
        if(err != OK) {
            notifyPreparedAndCleanup(err);
            return;
        }
        notifyVideoSizeChanged(msg);
    }

    notifyFlagsChanged(
            (mIsSecure ? FLAG_SECURE : 0)
            | (mDecryptHandle != NULL ? FLAG_PROTECTED : 0)
            | FLAG_CAN_PAUSE
            | FLAG_CAN_SEEK_BACKWARD
            | FLAG_CAN_SEEK_FORWARD
            | FLAG_CAN_SEEK);

    if (mIsSecure) {
        // secure decoders must be instantiated before starting widevine source
        sp<AMessage> reply = new AMessage(kWhatSecureDecodersInstantiated, this);
        notifyInstantiateSecureDecoders(reply);
    } else {
        finishPrepareAsync();
    }
}
459
460void NuPlayer::GenericSource::onSecureDecodersInstantiated(status_t err) {
461    if (err != OK) {
462        ALOGE("Failed to instantiate secure decoders!");
463        notifyPreparedAndCleanup(err);
464        return;
465    }
466    finishPrepareAsync();
467}
468
// Final stage of preparation: starts the track sources, and for streaming
// sources spins up the buffering monitor (which will report prepared once
// enough data is cached); non-streaming sources report prepared at once.
void NuPlayer::GenericSource::finishPrepareAsync() {
    status_t err = startSources();
    if (err != OK) {
        ALOGE("Failed to init start data source!");
        notifyPreparedAndCleanup(err);
        return;
    }

    if (mIsStreaming) {
        // Lazily create the monitor's dedicated looper on first streaming
        // prepare; resetDataSource() tears it down.
        if (mBufferingMonitorLooper == NULL) {
            mBufferingMonitor->prepare(mCachedSource, mWVMExtractor, mDurationUs, mBitrate,
                    mIsStreaming);

            mBufferingMonitorLooper = new ALooper;
            mBufferingMonitorLooper->setName("GSBMonitor");
            mBufferingMonitorLooper->start();
            mBufferingMonitorLooper->registerHandler(mBufferingMonitor);
        }

        mBufferingMonitor->ensureCacheIsFetching();
        mBufferingMonitor->restartPollBuffering();
    } else {
        notifyPrepared();
    }
}
494
// Reports the prepare result to the player; on failure also tears down all
// source state first so a later prepare starts clean.
void NuPlayer::GenericSource::notifyPreparedAndCleanup(status_t err) {
    if (err != OK) {
        {
            // Hold local sp<> copies so the actual object destruction
            // happens outside mDisconnectLock; only the member clears run
            // under the lock (racing with disconnect()).
            sp<DataSource> dataSource = mDataSource;
            sp<NuCachedSource2> cachedSource = mCachedSource;
            sp<DataSource> httpSource = mHttpSource;
            {
                Mutex::Autolock _l(mDisconnectLock);
                mDataSource.clear();
                mDecryptHandle = NULL;
                mDrmManagerClient = NULL;
                mCachedSource.clear();
                mHttpSource.clear();
            }
        }
        mBitrate = -1;

        mBufferingMonitor->cancelPollBuffering();
    }
    notifyPrepared(err);
}
516
// Begins (or resumes after stop) playback: re-enables reads, primes both
// packet queues, updates DRM playback accounting, and posts kWhatStart so
// the buffering monitor restarts polling on the looper thread.
void NuPlayer::GenericSource::start() {
    ALOGI("start");

    mStopRead = false;
    if (mAudioTrack.mSource != NULL) {
        postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
    }

    if (mVideoTrack.mSource != NULL) {
        postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
    }

    setDrmPlaybackStatusIfNeeded(Playback::START, getLastReadPosition() / 1000);
    mStarted = true;

    (new AMessage(kWhatStart, this))->post();
}
534
535void NuPlayer::GenericSource::stop() {
536    // nothing to do, just account for DRM playback status
537    setDrmPlaybackStatusIfNeeded(Playback::STOP, 0);
538    mStarted = false;
539    if (mIsWidevine || mIsSecure) {
540        // For widevine or secure sources we need to prevent any further reads.
541        sp<AMessage> msg = new AMessage(kWhatStopWidevine, this);
542        sp<AMessage> response;
543        (void) msg->postAndAwaitResponse(&response);
544    }
545}
546
// Pauses playback: DRM bookkeeping plus clearing mStarted, which makes
// dequeueAccessUnit() return -EWOULDBLOCK for audio until start()/resume().
void NuPlayer::GenericSource::pause() {
    // nothing to do, just account for DRM playback status
    setDrmPlaybackStatusIfNeeded(Playback::PAUSE, 0);
    mStarted = false;
}
552
// Resumes playback after pause(): DRM bookkeeping, re-enables dequeues,
// and posts kWhatResume so the buffering monitor restarts polling.
void NuPlayer::GenericSource::resume() {
    // nothing to do, just account for DRM playback status
    setDrmPlaybackStatusIfNeeded(Playback::START, getLastReadPosition() / 1000);
    mStarted = true;

    (new AMessage(kWhatResume, this))->post();
}
560
561void NuPlayer::GenericSource::disconnect() {
562    sp<DataSource> dataSource, httpSource;
563    {
564        Mutex::Autolock _l(mDisconnectLock);
565        dataSource = mDataSource;
566        httpSource = mHttpSource;
567    }
568
569    if (dataSource != NULL) {
570        // disconnect data source
571        if (dataSource->flags() & DataSource::kIsCachingDataSource) {
572            static_cast<NuCachedSource2 *>(dataSource.get())->disconnect();
573        }
574    } else if (httpSource != NULL) {
575        static_cast<HTTPBase *>(httpSource.get())->disconnect();
576    }
577}
578
// Forwards a playback-status change to the DRM manager when a decrypt
// handle is active.
// NOTE(review): this also unconditionally replaces the subtitle and
// timed-text packet sources on every call (start/stop/pause/resume),
// discarding any queued text buffers — looks intentional as a reset of the
// text pipelines on state change, but worth confirming.
void NuPlayer::GenericSource::setDrmPlaybackStatusIfNeeded(int playbackStatus, int64_t position) {
    if (mDecryptHandle != NULL) {
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle, playbackStatus, position);
    }
    mSubtitleTrack.mPackets = new AnotherPacketSource(NULL);
    mTimedTextTrack.mPackets = new AnotherPacketSource(NULL);
}
586
// No pushed TS data for this source — it pulls from its extractor —
// so there is never more to feed; always succeeds.
status_t NuPlayer::GenericSource::feedMoreTSData() {
    return OK;
}
590
591void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) {
592    switch (msg->what()) {
593      case kWhatPrepareAsync:
594      {
595          onPrepareAsync();
596          break;
597      }
598      case kWhatFetchSubtitleData:
599      {
600          fetchTextData(kWhatSendSubtitleData, MEDIA_TRACK_TYPE_SUBTITLE,
601                  mFetchSubtitleDataGeneration, mSubtitleTrack.mPackets, msg);
602          break;
603      }
604
605      case kWhatFetchTimedTextData:
606      {
607          fetchTextData(kWhatSendTimedTextData, MEDIA_TRACK_TYPE_TIMEDTEXT,
608                  mFetchTimedTextDataGeneration, mTimedTextTrack.mPackets, msg);
609          break;
610      }
611
612      case kWhatSendSubtitleData:
613      {
614          sendTextData(kWhatSubtitleData, MEDIA_TRACK_TYPE_SUBTITLE,
615                  mFetchSubtitleDataGeneration, mSubtitleTrack.mPackets, msg);
616          break;
617      }
618
619      case kWhatSendGlobalTimedTextData:
620      {
621          sendGlobalTextData(kWhatTimedTextData, mFetchTimedTextDataGeneration, msg);
622          break;
623      }
624      case kWhatSendTimedTextData:
625      {
626          sendTextData(kWhatTimedTextData, MEDIA_TRACK_TYPE_TIMEDTEXT,
627                  mFetchTimedTextDataGeneration, mTimedTextTrack.mPackets, msg);
628          break;
629      }
630
631      case kWhatChangeAVSource:
632      {
633          int32_t trackIndex;
634          CHECK(msg->findInt32("trackIndex", &trackIndex));
635          const sp<IMediaSource> source = mSources.itemAt(trackIndex);
636
637          Track* track;
638          const char *mime;
639          media_track_type trackType, counterpartType;
640          sp<MetaData> meta = source->getFormat();
641          meta->findCString(kKeyMIMEType, &mime);
642          if (!strncasecmp(mime, "audio/", 6)) {
643              track = &mAudioTrack;
644              trackType = MEDIA_TRACK_TYPE_AUDIO;
645              counterpartType = MEDIA_TRACK_TYPE_VIDEO;;
646          } else {
647              CHECK(!strncasecmp(mime, "video/", 6));
648              track = &mVideoTrack;
649              trackType = MEDIA_TRACK_TYPE_VIDEO;
650              counterpartType = MEDIA_TRACK_TYPE_AUDIO;;
651          }
652
653
654          if (track->mSource != NULL) {
655              track->mSource->stop();
656          }
657          track->mSource = source;
658          track->mSource->start();
659          track->mIndex = trackIndex;
660
661          int64_t timeUs, actualTimeUs;
662          const bool formatChange = true;
663          if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
664              timeUs = mAudioLastDequeueTimeUs;
665          } else {
666              timeUs = mVideoLastDequeueTimeUs;
667          }
668          readBuffer(trackType, timeUs, &actualTimeUs, formatChange);
669          readBuffer(counterpartType, -1, NULL, formatChange);
670          ALOGV("timeUs %lld actualTimeUs %lld", (long long)timeUs, (long long)actualTimeUs);
671
672          break;
673      }
674
675      case kWhatStart:
676      case kWhatResume:
677      {
678          mBufferingMonitor->restartPollBuffering();
679          break;
680      }
681
682      case kWhatGetFormat:
683      {
684          onGetFormatMeta(msg);
685          break;
686      }
687
688      case kWhatGetSelectedTrack:
689      {
690          onGetSelectedTrack(msg);
691          break;
692      }
693
694      case kWhatSelectTrack:
695      {
696          onSelectTrack(msg);
697          break;
698      }
699
700      case kWhatSeek:
701      {
702          onSeek(msg);
703          break;
704      }
705
706      case kWhatReadBuffer:
707      {
708          onReadBuffer(msg);
709          break;
710      }
711
712      case kWhatSecureDecodersInstantiated:
713      {
714          int32_t err;
715          CHECK(msg->findInt32("err", &err));
716          onSecureDecodersInstantiated(err);
717          break;
718      }
719
720      case kWhatStopWidevine:
721      {
722          // mStopRead is only used for Widevine to prevent the video source
723          // from being read while the associated video decoder is shutting down.
724          mStopRead = true;
725          if (mVideoTrack.mSource != NULL) {
726              mVideoTrack.mPackets->clear();
727          }
728          sp<AMessage> response = new AMessage;
729          sp<AReplyToken> replyID;
730          CHECK(msg->senderAwaitsResponse(&replyID));
731          response->postReply(replyID);
732          break;
733      }
734      default:
735          Source::onMessageReceived(msg);
736          break;
737    }
738}
739
// Reads one text (subtitle/timed-text) buffer for the given track and
// schedules the corresponding send message at the buffer's display time.
// Messages from a superseded track selection are dropped via the
// generation counter.
void NuPlayer::GenericSource::fetchTextData(
        uint32_t sendWhat,
        media_track_type type,
        int32_t curGen,
        sp<AnotherPacketSource> packets,
        sp<AMessage> msg) {
    int32_t msgGeneration;
    CHECK(msg->findInt32("generation", &msgGeneration));
    if (msgGeneration != curGen) {
        // stale
        return;
    }

    int32_t avail;
    if (packets->hasBufferAvailable(&avail)) {
        // A buffer is already queued; nothing to fetch.
        return;
    }

    int64_t timeUs;
    CHECK(msg->findInt64("timeUs", &timeUs));

    int64_t subTimeUs;
    readBuffer(type, timeUs, &subTimeUs);

    // Deliver when the text buffer is due relative to the current media
    // time; subtitles are sent one second early.
    int64_t delayUs = subTimeUs - timeUs;
    if (msg->what() == kWhatFetchSubtitleData) {
        const int64_t oneSecUs = 1000000ll;
        delayUs -= oneSecUs;
    }
    sp<AMessage> msg2 = new AMessage(sendWhat, this);
    msg2->setInt32("generation", msgGeneration);
    msg2->post(delayUs < 0 ? 0 : delayUs);
}
773
// Delivers the queued text buffer to the player and reschedules itself for
// the next one. Stale generations (track switched since the fetch) are
// dropped.
void NuPlayer::GenericSource::sendTextData(
        uint32_t what,
        media_track_type type,
        int32_t curGen,
        sp<AnotherPacketSource> packets,
        sp<AMessage> msg) {
    int32_t msgGeneration;
    CHECK(msg->findInt32("generation", &msgGeneration));
    if (msgGeneration != curGen) {
        // stale
        return;
    }

    int64_t subTimeUs;
    if (packets->nextBufferTime(&subTimeUs) != OK) {
        return;
    }

    // Pre-fetch the following buffer so its timestamp determines when to
    // re-post this message.
    int64_t nextSubTimeUs;
    readBuffer(type, -1, &nextSubTimeUs);

    sp<ABuffer> buffer;
    status_t dequeueStatus = packets->dequeueAccessUnit(&buffer);
    if (dequeueStatus == OK) {
        sp<AMessage> notify = dupNotify();
        notify->setInt32("what", what);
        notify->setBuffer("buffer", buffer);
        notify->post();

        // Re-post for the next buffer at its relative due time.
        const int64_t delayUs = nextSubTimeUs - subTimeUs;
        msg->post(delayUs < 0 ? 0 : delayUs);
    }
}
807
// Sends the track-global timed-text data (e.g. 3GPP text format box) to
// the player as a single buffer flagged "global". Stale generations are
// dropped.
void NuPlayer::GenericSource::sendGlobalTextData(
        uint32_t what,
        int32_t curGen,
        sp<AMessage> msg) {
    int32_t msgGeneration;
    CHECK(msg->findInt32("generation", &msgGeneration));
    if (msgGeneration != curGen) {
        // stale
        return;
    }

    uint32_t textType;
    const void *data;
    size_t size = 0;
    if (mTimedTextTrack.mSource->getFormat()->findData(
                    kKeyTextFormatData, &textType, &data, &size)) {
        mGlobalTimedText = new ABuffer(size);
        // data() is NULL if the allocation failed; skip silently then.
        if (mGlobalTimedText->data()) {
            memcpy(mGlobalTimedText->data(), data, size);
            sp<AMessage> globalMeta = mGlobalTimedText->meta();
            globalMeta->setInt64("timeUs", 0);
            globalMeta->setString("mime", MEDIA_MIMETYPE_TEXT_3GPP);
            globalMeta->setInt32("global", 1);
            sp<AMessage> notify = dupNotify();
            notify->setInt32("what", what);
            notify->setBuffer("buffer", mGlobalTimedText);
            notify->post();
        }
    }
}
838
839sp<MetaData> NuPlayer::GenericSource::getFormatMeta(bool audio) {
840    sp<AMessage> msg = new AMessage(kWhatGetFormat, this);
841    msg->setInt32("audio", audio);
842
843    sp<AMessage> response;
844    sp<RefBase> format;
845    status_t err = msg->postAndAwaitResponse(&response);
846    if (err == OK && response != NULL) {
847        CHECK(response->findObject("format", &format));
848        return static_cast<MetaData*>(format.get());
849    } else {
850        return NULL;
851    }
852}
853
854void NuPlayer::GenericSource::onGetFormatMeta(sp<AMessage> msg) const {
855    int32_t audio;
856    CHECK(msg->findInt32("audio", &audio));
857
858    sp<AMessage> response = new AMessage;
859    sp<MetaData> format = doGetFormatMeta(audio);
860    response->setObject("format", format);
861
862    sp<AReplyToken> replyID;
863    CHECK(msg->senderAwaitsResponse(&replyID));
864    response->postReply(replyID);
865}
866
867sp<MetaData> NuPlayer::GenericSource::doGetFormatMeta(bool audio) const {
868    sp<IMediaSource> source = audio ? mAudioTrack.mSource : mVideoTrack.mSource;
869
870    if (source == NULL) {
871        return NULL;
872    }
873
874    return source->getFormat();
875}
876
// Pops one access unit off the audio or video packet queue for the decoder.
// Returns -EWOULDBLOCK when nothing is ready yet (and schedules a read so
// something will be), or the track's terminal status (e.g. EOS) once
// drained. Successful dequeues update the last-dequeued timestamps and
// keep subtitle/timed-text fetching in step with playback.
status_t NuPlayer::GenericSource::dequeueAccessUnit(
        bool audio, sp<ABuffer> *accessUnit) {
    // Audio is held back until start()/resume(); see pause()/stop() which
    // clear mStarted.
    if (audio && !mStarted) {
        return -EWOULDBLOCK;
    }

    Track *track = audio ? &mAudioTrack : &mVideoTrack;

    if (track->mSource == NULL) {
        return -EWOULDBLOCK;
    }

    if (mIsWidevine && !audio) {
        // try to read a buffer as we may not have been able to the last time
        postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
    }

    status_t finalResult;
    if (!track->mPackets->hasBufferAvailable(&finalResult)) {
        if (finalResult == OK) {
            // Queue empty but not finished: schedule a read and retry later.
            postReadBuffer(
                    audio ? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
            return -EWOULDBLOCK;
        }
        return finalResult;
    }

    status_t result = track->mPackets->dequeueAccessUnit(accessUnit);

    // start pulling in more buffers if we only have one (or no) buffer left
    // so that decoder has less chance of being starved
    if (track->mPackets->getAvailableBufferCount(&finalResult) < 2) {
        postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
    }

    if (result != OK) {
        // On a dequeue error, invalidate any in-flight text fetches so they
        // don't deliver against a broken timeline.
        if (mSubtitleTrack.mSource != NULL) {
            mSubtitleTrack.mPackets->clear();
            mFetchSubtitleDataGeneration++;
        }
        if (mTimedTextTrack.mSource != NULL) {
            mTimedTextTrack.mPackets->clear();
            mFetchTimedTextDataGeneration++;
        }
        return result;
    }

    int64_t timeUs;
    status_t eosResult; // ignored
    CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
    if (audio) {
        mAudioLastDequeueTimeUs = timeUs;
        mBufferingMonitor->updateDequeuedBufferTime(timeUs);
    } else {
        mVideoLastDequeueTimeUs = timeUs;
    }

    // Keep the text pipelines fed: if a text queue ran dry, fetch the next
    // buffer relative to the just-dequeued A/V timestamp.
    if (mSubtitleTrack.mSource != NULL
            && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
        sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this);
        msg->setInt64("timeUs", timeUs);
        msg->setInt32("generation", mFetchSubtitleDataGeneration);
        msg->post();
    }

    if (mTimedTextTrack.mSource != NULL
            && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) {
        sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, this);
        msg->setInt64("timeUs", timeUs);
        msg->setInt32("generation", mFetchTimedTextDataGeneration);
        msg->post();
    }

    return result;
}
952
// Reports the cached container duration in microseconds (-1 when unknown).
// Always succeeds.
status_t NuPlayer::GenericSource::getDuration(int64_t *durationUs) {
    *durationUs = mDurationUs;
    return OK;
}
957
// Number of extractor tracks discovered in initFromDataSource().
size_t NuPlayer::GenericSource::getTrackCount() const {
    return mSources.size();
}
961
962sp<AMessage> NuPlayer::GenericSource::getTrackInfo(size_t trackIndex) const {
963    size_t trackCount = mSources.size();
964    if (trackIndex >= trackCount) {
965        return NULL;
966    }
967
968    sp<AMessage> format = new AMessage();
969    sp<MetaData> meta = mSources.itemAt(trackIndex)->getFormat();
970    if (meta == NULL) {
971        ALOGE("no metadata for track %zu", trackIndex);
972        return NULL;
973    }
974
975    const char *mime;
976    CHECK(meta->findCString(kKeyMIMEType, &mime));
977    format->setString("mime", mime);
978
979    int32_t trackType;
980    if (!strncasecmp(mime, "video/", 6)) {
981        trackType = MEDIA_TRACK_TYPE_VIDEO;
982    } else if (!strncasecmp(mime, "audio/", 6)) {
983        trackType = MEDIA_TRACK_TYPE_AUDIO;
984    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
985        trackType = MEDIA_TRACK_TYPE_TIMEDTEXT;
986    } else {
987        trackType = MEDIA_TRACK_TYPE_UNKNOWN;
988    }
989    format->setInt32("type", trackType);
990
991    const char *lang;
992    if (!meta->findCString(kKeyMediaLanguage, &lang)) {
993        lang = "und";
994    }
995    format->setString("language", lang);
996
997    if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
998        int32_t isAutoselect = 1, isDefault = 0, isForced = 0;
999        meta->findInt32(kKeyTrackIsAutoselect, &isAutoselect);
1000        meta->findInt32(kKeyTrackIsDefault, &isDefault);
1001        meta->findInt32(kKeyTrackIsForced, &isForced);
1002
1003        format->setInt32("auto", !!isAutoselect);
1004        format->setInt32("default", !!isDefault);
1005        format->setInt32("forced", !!isForced);
1006    }
1007
1008    return format;
1009}
1010
1011ssize_t NuPlayer::GenericSource::getSelectedTrack(media_track_type type) const {
1012    sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, this);
1013    msg->setInt32("type", type);
1014
1015    sp<AMessage> response;
1016    int32_t index;
1017    status_t err = msg->postAndAwaitResponse(&response);
1018    if (err == OK && response != NULL) {
1019        CHECK(response->findInt32("index", &index));
1020        return index;
1021    } else {
1022        return -1;
1023    }
1024}
1025
1026void NuPlayer::GenericSource::onGetSelectedTrack(sp<AMessage> msg) const {
1027    int32_t tmpType;
1028    CHECK(msg->findInt32("type", &tmpType));
1029    media_track_type type = (media_track_type)tmpType;
1030
1031    sp<AMessage> response = new AMessage;
1032    ssize_t index = doGetSelectedTrack(type);
1033    response->setInt32("index", index);
1034
1035    sp<AReplyToken> replyID;
1036    CHECK(msg->senderAwaitsResponse(&replyID));
1037    response->postReply(replyID);
1038}
1039
1040ssize_t NuPlayer::GenericSource::doGetSelectedTrack(media_track_type type) const {
1041    const Track *track = NULL;
1042    switch (type) {
1043    case MEDIA_TRACK_TYPE_VIDEO:
1044        track = &mVideoTrack;
1045        break;
1046    case MEDIA_TRACK_TYPE_AUDIO:
1047        track = &mAudioTrack;
1048        break;
1049    case MEDIA_TRACK_TYPE_TIMEDTEXT:
1050        track = &mTimedTextTrack;
1051        break;
1052    case MEDIA_TRACK_TYPE_SUBTITLE:
1053        track = &mSubtitleTrack;
1054        break;
1055    default:
1056        break;
1057    }
1058
1059    if (track != NULL && track->mSource != NULL) {
1060        return track->mIndex;
1061    }
1062
1063    return -1;
1064}
1065
1066status_t NuPlayer::GenericSource::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {
1067    ALOGV("%s track: %zu", select ? "select" : "deselect", trackIndex);
1068    sp<AMessage> msg = new AMessage(kWhatSelectTrack, this);
1069    msg->setInt32("trackIndex", trackIndex);
1070    msg->setInt32("select", select);
1071    msg->setInt64("timeUs", timeUs);
1072
1073    sp<AMessage> response;
1074    status_t err = msg->postAndAwaitResponse(&response);
1075    if (err == OK && response != NULL) {
1076        CHECK(response->findInt32("err", &err));
1077    }
1078
1079    return err;
1080}
1081
1082void NuPlayer::GenericSource::onSelectTrack(sp<AMessage> msg) {
1083    int32_t trackIndex, select;
1084    int64_t timeUs;
1085    CHECK(msg->findInt32("trackIndex", &trackIndex));
1086    CHECK(msg->findInt32("select", &select));
1087    CHECK(msg->findInt64("timeUs", &timeUs));
1088
1089    sp<AMessage> response = new AMessage;
1090    status_t err = doSelectTrack(trackIndex, select, timeUs);
1091    response->setInt32("err", err);
1092
1093    sp<AReplyToken> replyID;
1094    CHECK(msg->senderAwaitsResponse(&replyID));
1095    response->postReply(replyID);
1096}
1097
// Performs track (de)selection on the looper thread.
//
// Deselection is only supported for subtitle/timed-text tracks: the track's
// source is stopped and its packet queue cleared. Selecting a text track
// swaps the active source and kicks off fetching near |timeUs|; selecting an
// audio/video track defers the switch to kWhatChangeAVSource.
//
// Returns BAD_INDEX for an out-of-range index, INVALID_OPERATION for
// unsupported combinations, OK otherwise.
status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select, int64_t timeUs) {
    if (trackIndex >= mSources.size()) {
        return BAD_INDEX;
    }

    if (!select) {
        // Deselect: only text tracks can be deselected. Bumping the fetch
        // generation invalidates any in-flight fetch messages for the track.
        Track* track = NULL;
        if (mSubtitleTrack.mSource != NULL && trackIndex == mSubtitleTrack.mIndex) {
            track = &mSubtitleTrack;
            mFetchSubtitleDataGeneration++;
        } else if (mTimedTextTrack.mSource != NULL && trackIndex == mTimedTextTrack.mIndex) {
            track = &mTimedTextTrack;
            mFetchTimedTextDataGeneration++;
        }
        if (track == NULL) {
            return INVALID_OPERATION;
        }
        track->mSource->stop();
        track->mSource = NULL;
        track->mPackets->clear();
        return OK;
    }

    const sp<IMediaSource> source = mSources.itemAt(trackIndex);
    sp<MetaData> meta = source->getFormat();
    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));
    if (!strncasecmp(mime, "text/", 5)) {
        // Anything "text/" other than 3GPP timed text is treated as subtitle.
        bool isSubtitle = strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP);
        Track *track = isSubtitle ? &mSubtitleTrack : &mTimedTextTrack;
        if (track->mSource != NULL && track->mIndex == trackIndex) {
            // Already selected; nothing to do.
            return OK;
        }
        track->mIndex = trackIndex;
        if (track->mSource != NULL) {
            // Stop the previously selected source of the same kind.
            track->mSource->stop();
        }
        track->mSource = mSources.itemAt(trackIndex);
        track->mSource->start();
        if (track->mPackets == NULL) {
            track->mPackets = new AnotherPacketSource(track->mSource->getFormat());
        } else {
            // Reuse the queue: drop stale packets, adopt the new format.
            track->mPackets->clear();
            track->mPackets->setFormat(track->mSource->getFormat());

        }

        // Invalidate in-flight fetches for the replaced track.
        if (isSubtitle) {
            mFetchSubtitleDataGeneration++;
        } else {
            mFetchTimedTextDataGeneration++;
        }

        status_t eosResult; // ignored
        // Restart subtitle fetching near the current position if the queue
        // is empty.
        if (mSubtitleTrack.mSource != NULL
                && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
            sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this);
            msg->setInt64("timeUs", timeUs);
            msg->setInt32("generation", mFetchSubtitleDataGeneration);
            msg->post();
        }

        // Always re-send global (out-of-band) timed-text data on selection.
        sp<AMessage> msg2 = new AMessage(kWhatSendGlobalTimedTextData, this);
        msg2->setInt32("generation", mFetchTimedTextDataGeneration);
        msg2->post();

        // Restart timed-text fetching near the current position if needed.
        if (mTimedTextTrack.mSource != NULL
                && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) {
            sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, this);
            msg->setInt64("timeUs", timeUs);
            msg->setInt32("generation", mFetchTimedTextDataGeneration);
            msg->post();
        }

        return OK;
    } else if (!strncasecmp(mime, "audio/", 6) || !strncasecmp(mime, "video/", 6)) {
        bool audio = !strncasecmp(mime, "audio/", 6);
        Track *track = audio ? &mAudioTrack : &mVideoTrack;
        if (track->mSource != NULL && track->mIndex == trackIndex) {
            // Already selected; nothing to do.
            return OK;
        }

        // A/V source changes are handled asynchronously to avoid stalling
        // the caller while buffers are in flight.
        sp<AMessage> msg = new AMessage(kWhatChangeAVSource, this);
        msg->setInt32("trackIndex", trackIndex);
        msg->post();
        return OK;
    }

    return INVALID_OPERATION;
}
1188
1189status_t NuPlayer::GenericSource::seekTo(int64_t seekTimeUs) {
1190    sp<AMessage> msg = new AMessage(kWhatSeek, this);
1191    msg->setInt64("seekTimeUs", seekTimeUs);
1192
1193    sp<AMessage> response;
1194    status_t err = msg->postAndAwaitResponse(&response);
1195    if (err == OK && response != NULL) {
1196        CHECK(response->findInt32("err", &err));
1197    }
1198
1199    return err;
1200}
1201
1202void NuPlayer::GenericSource::onSeek(sp<AMessage> msg) {
1203    int64_t seekTimeUs;
1204    CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
1205
1206    sp<AMessage> response = new AMessage;
1207    status_t err = doSeek(seekTimeUs);
1208    response->setInt32("err", err);
1209
1210    sp<AReplyToken> replyID;
1211    CHECK(msg->senderAwaitsResponse(&replyID));
1212    response->postReply(replyID);
1213}
1214
// Performs the seek on the looper thread: reads the first video buffer at
// the seek point (adopting its actual timestamp), primes audio at that same
// time, updates DRM playback status, and restarts buffering polling.
// Returns INVALID_OPERATION if reads have been stopped (Widevine teardown).
status_t NuPlayer::GenericSource::doSeek(int64_t seekTimeUs) {
    // Reset dequeue-time tracking; it re-anchors on the next dequeued buffer.
    mBufferingMonitor->updateDequeuedBufferTime(-1ll);

    // If the Widevine source is stopped, do not attempt to read any
    // more buffers.
    if (mStopRead) {
        return INVALID_OPERATION;
    }
    if (mVideoTrack.mSource != NULL) {
        int64_t actualTimeUs;
        readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, &actualTimeUs);

        // Snap to the sync sample actually found so audio lines up with it.
        seekTimeUs = actualTimeUs;
        mVideoLastDequeueTimeUs = seekTimeUs;
    }

    if (mAudioTrack.mSource != NULL) {
        readBuffer(MEDIA_TRACK_TYPE_AUDIO, seekTimeUs);
        mAudioLastDequeueTimeUs = seekTimeUs;
    }

    // Report the new position to the DRM agent (expects milliseconds).
    setDrmPlaybackStatusIfNeeded(Playback::START, seekTimeUs / 1000);
    if (!mStarted) {
        setDrmPlaybackStatusIfNeeded(Playback::PAUSE, 0);
    }

    // If currently buffering, post kWhatBufferingEnd first, so that
    // NuPlayer resumes. Otherwise, if cache hits high watermark
    // before new polling happens, no one will resume the playback.
    mBufferingMonitor->stopBufferingIfNecessary();
    mBufferingMonitor->restartPollBuffering();

    return OK;
}
1249
1250sp<ABuffer> NuPlayer::GenericSource::mediaBufferToABuffer(
1251        MediaBuffer* mb,
1252        media_track_type trackType,
1253        int64_t /* seekTimeUs */,
1254        int64_t *actualTimeUs) {
1255    bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO;
1256    size_t outLength = mb->range_length();
1257
1258    if (audio && mAudioIsVorbis) {
1259        outLength += sizeof(int32_t);
1260    }
1261
1262    sp<ABuffer> ab;
1263    if (mIsSecure && !audio) {
1264        // data is already provided in the buffer
1265        ab = new ABuffer(NULL, mb->range_length());
1266        mb->add_ref();
1267        ab->setMediaBufferBase(mb);
1268    } else {
1269        ab = new ABuffer(outLength);
1270        memcpy(ab->data(),
1271               (const uint8_t *)mb->data() + mb->range_offset(),
1272               mb->range_length());
1273    }
1274
1275    if (audio && mAudioIsVorbis) {
1276        int32_t numPageSamples;
1277        if (!mb->meta_data()->findInt32(kKeyValidSamples, &numPageSamples)) {
1278            numPageSamples = -1;
1279        }
1280
1281        uint8_t* abEnd = ab->data() + mb->range_length();
1282        memcpy(abEnd, &numPageSamples, sizeof(numPageSamples));
1283    }
1284
1285    sp<AMessage> meta = ab->meta();
1286
1287    int64_t timeUs;
1288    CHECK(mb->meta_data()->findInt64(kKeyTime, &timeUs));
1289    meta->setInt64("timeUs", timeUs);
1290
1291#if 0
1292    // Temporarily disable pre-roll till we have a full solution to handle
1293    // both single seek and continous seek gracefully.
1294    if (seekTimeUs > timeUs) {
1295        sp<AMessage> extra = new AMessage;
1296        extra->setInt64("resume-at-mediaTimeUs", seekTimeUs);
1297        meta->setMessage("extra", extra);
1298    }
1299#endif
1300
1301    if (trackType == MEDIA_TRACK_TYPE_TIMEDTEXT) {
1302        const char *mime;
1303        CHECK(mTimedTextTrack.mSource != NULL
1304                && mTimedTextTrack.mSource->getFormat()->findCString(kKeyMIMEType, &mime));
1305        meta->setString("mime", mime);
1306    }
1307
1308    int64_t durationUs;
1309    if (mb->meta_data()->findInt64(kKeyDuration, &durationUs)) {
1310        meta->setInt64("durationUs", durationUs);
1311    }
1312
1313    if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
1314        meta->setInt32("trackIndex", mSubtitleTrack.mIndex);
1315    }
1316
1317    uint32_t dataType; // unused
1318    const void *seiData;
1319    size_t seiLength;
1320    if (mb->meta_data()->findData(kKeySEI, &dataType, &seiData, &seiLength)) {
1321        sp<ABuffer> sei = ABuffer::CreateAsCopy(seiData, seiLength);;
1322        meta->setBuffer("sei", sei);
1323    }
1324
1325    const void *mpegUserDataPointer;
1326    size_t mpegUserDataLength;
1327    if (mb->meta_data()->findData(
1328            kKeyMpegUserData, &dataType, &mpegUserDataPointer, &mpegUserDataLength)) {
1329        sp<ABuffer> mpegUserData = ABuffer::CreateAsCopy(mpegUserDataPointer, mpegUserDataLength);
1330        meta->setBuffer("mpegUserData", mpegUserData);
1331    }
1332
1333    if (actualTimeUs) {
1334        *actualTimeUs = timeUs;
1335    }
1336
1337    mb->release();
1338    mb = NULL;
1339
1340    return ab;
1341}
1342
1343void NuPlayer::GenericSource::postReadBuffer(media_track_type trackType) {
1344    Mutex::Autolock _l(mReadBufferLock);
1345
1346    if ((mPendingReadBufferTypes & (1 << trackType)) == 0) {
1347        mPendingReadBufferTypes |= (1 << trackType);
1348        sp<AMessage> msg = new AMessage(kWhatReadBuffer, this);
1349        msg->setInt32("trackType", trackType);
1350        msg->post();
1351    }
1352}
1353
1354void NuPlayer::GenericSource::onReadBuffer(sp<AMessage> msg) {
1355    int32_t tmpType;
1356    CHECK(msg->findInt32("trackType", &tmpType));
1357    media_track_type trackType = (media_track_type)tmpType;
1358    readBuffer(trackType);
1359    {
1360        // only protect the variable change, as readBuffer may
1361        // take considerable time.
1362        Mutex::Autolock _l(mReadBufferLock);
1363        mPendingReadBufferTypes &= ~(1 << trackType);
1364    }
1365}
1366
// Pulls up to a per-track-type quota of buffers from the extractor source
// for |trackType| and queues them on the track's AnotherPacketSource.
//
// When |seekTimeUs| >= 0 the first read seeks to the previous sync sample;
// |actualTimeUs|, if non-NULL, receives the timestamp of the first buffer
// (or echoes |seekTimeUs| when nothing is read). |formatChange| controls
// the kind of discontinuity queued before the first buffer.
void NuPlayer::GenericSource::readBuffer(
        media_track_type trackType, int64_t seekTimeUs, int64_t *actualTimeUs, bool formatChange) {
    // Do not read data if Widevine source is stopped
    if (mStopRead) {
        return;
    }
    Track *track;
    size_t maxBuffers = 1;
    switch (trackType) {
        case MEDIA_TRACK_TYPE_VIDEO:
            track = &mVideoTrack;
            // Widevine uses smaller batches; see audio case below as well.
            if (mIsWidevine) {
                maxBuffers = 2;
            } else {
                maxBuffers = 4;
            }
            break;
        case MEDIA_TRACK_TYPE_AUDIO:
            track = &mAudioTrack;
            if (mIsWidevine) {
                maxBuffers = 8;
            } else {
                maxBuffers = 64;
            }
            break;
        case MEDIA_TRACK_TYPE_SUBTITLE:
            track = &mSubtitleTrack;
            break;
        case MEDIA_TRACK_TYPE_TIMEDTEXT:
            track = &mTimedTextTrack;
            break;
        default:
            TRESPASS();
    }

    if (track->mSource == NULL) {
        return;
    }

    // Default the reported time to the requested seek time; overwritten by
    // the first buffer's actual timestamp below.
    if (actualTimeUs) {
        *actualTimeUs = seekTimeUs;
    }

    MediaSource::ReadOptions options;

    bool seeking = false;

    if (seekTimeUs >= 0) {
        options.setSeekTo(seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
        seeking = true;
    }

    if (mIsWidevine) {
        // Widevine sources may return WOULD_BLOCK instead of blocking.
        options.setNonBlocking();
    }

    // Batched reads are only used for non-Widevine audio; seeks always go
    // through the single-buffer path so ReadOptions apply.
    bool couldReadMultiple = (!mIsWidevine && trackType == MEDIA_TRACK_TYPE_AUDIO);
    for (size_t numBuffers = 0; numBuffers < maxBuffers; ) {
        Vector<MediaBuffer *> mediaBuffers;
        status_t err = NO_ERROR;

        if (!seeking && couldReadMultiple) {
            err = track->mSource->readMultiple(&mediaBuffers, (maxBuffers - numBuffers));
        } else {
            MediaBuffer *mbuf = NULL;
            err = track->mSource->read(&mbuf, &options);
            if (err == OK && mbuf != NULL) {
                mediaBuffers.push_back(mbuf);
            }
        }

        // The seek only applies to the first read.
        options.clearSeekTo();

        size_t id = 0;
        size_t count = mediaBuffers.size();
        for (; id < count; ++id) {
            int64_t timeUs;
            MediaBuffer *mbuf = mediaBuffers[id];
            if (!mbuf->meta_data()->findInt64(kKeyTime, &timeUs)) {
                // A buffer without a timestamp is malformed; signal EOS and
                // release the remaining buffers below.
                mbuf->meta_data()->dumpToLog();
                track->mPackets->signalEOS(ERROR_MALFORMED);
                break;
            }
            if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
                mAudioTimeUs = timeUs;
                mBufferingMonitor->updateQueuedTime(true /* isAudio */, timeUs);
            } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
                mVideoTimeUs = timeUs;
                mBufferingMonitor->updateQueuedTime(false /* isAudio */, timeUs);
            }

            // Queue a discontinuity ahead of the first buffer after a seek
            // or track change.
            queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);

            sp<ABuffer> buffer = mediaBufferToABuffer(
                    mbuf, trackType, seekTimeUs,
                    numBuffers == 0 ? actualTimeUs : NULL);
            track->mPackets->queueAccessUnit(buffer);
            formatChange = false;
            seeking = false;
            ++numBuffers;
        }
        if (id < count) {
            // Error, some mediaBuffer doesn't have kKeyTime.
            for (; id < count; ++id) {
                mediaBuffers[id]->release();
            }
            break;
        }

        if (err == WOULD_BLOCK) {
            break;
        } else if (err == INFO_FORMAT_CHANGED) {
#if 0
            track->mPackets->queueDiscontinuity(
                    ATSParser::DISCONTINUITY_FORMATCHANGE,
                    NULL,
                    false /* discard */);
#endif
        } else if (err != OK) {
            queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
            track->mPackets->signalEOS(err);
            break;
        }
    }
}
1492
1493void NuPlayer::GenericSource::queueDiscontinuityIfNeeded(
1494        bool seeking, bool formatChange, media_track_type trackType, Track *track) {
1495    // formatChange && seeking: track whose source is changed during selection
1496    // formatChange && !seeking: track whose source is not changed during selection
1497    // !formatChange: normal seek
1498    if ((seeking || formatChange)
1499            && (trackType == MEDIA_TRACK_TYPE_AUDIO
1500            || trackType == MEDIA_TRACK_TYPE_VIDEO)) {
1501        ATSParser::DiscontinuityType type = (formatChange && seeking)
1502                ? ATSParser::DISCONTINUITY_FORMATCHANGE
1503                : ATSParser::DISCONTINUITY_NONE;
1504        track->mPackets->queueDiscontinuity(type, NULL /* extra */, true /* discard */);
1505    }
1506}
1507
// Constructs an idle BufferingMonitor; state is populated by prepare().
// |notify| is the message duplicated for all upstream notifications.
// Sentinel -1 values mean "unknown / not yet observed".
NuPlayer::GenericSource::BufferingMonitor::BufferingMonitor(const sp<AMessage> &notify)
    : mNotify(notify),
      mDurationUs(-1ll),
      mBitrate(-1ll),
      mIsStreaming(false),
      mAudioTimeUs(0),
      mVideoTimeUs(0),
      mPollBufferingGeneration(0),
      mPrepareBuffering(false),
      mBuffering(false),
      mPrevBufferPercentage(-1),
      mOffloadAudio(false),
      mFirstDequeuedBufferRealUs(-1ll),
      mFirstDequeuedBufferMediaUs(-1ll),
      mlastDequeuedBufferMediaUs(-1ll) {
}
1524
// Nothing to tear down explicitly; sp<> members release via RAII.
NuPlayer::GenericSource::BufferingMonitor::~BufferingMonitor() {
}
1527
1528void NuPlayer::GenericSource::BufferingMonitor::prepare(
1529        const sp<NuCachedSource2> &cachedSource,
1530        const sp<WVMExtractor> &wvmExtractor,
1531        int64_t durationUs,
1532        int64_t bitrate,
1533        bool isStreaming) {
1534    Mutex::Autolock _l(mLock);
1535    prepare_l(cachedSource, wvmExtractor, durationUs, bitrate, isStreaming);
1536}
1537
1538void NuPlayer::GenericSource::BufferingMonitor::stop() {
1539    Mutex::Autolock _l(mLock);
1540    prepare_l(NULL /* cachedSource */, NULL /* wvmExtractor */, -1 /* durationUs */,
1541            -1 /* bitrate */, false /* isStreaming */);
1542}
1543
1544void NuPlayer::GenericSource::BufferingMonitor::cancelPollBuffering() {
1545    Mutex::Autolock _l(mLock);
1546    cancelPollBuffering_l();
1547}
1548
1549void NuPlayer::GenericSource::BufferingMonitor::restartPollBuffering() {
1550    Mutex::Autolock _l(mLock);
1551    if (mIsStreaming) {
1552        cancelPollBuffering_l();
1553        onPollBuffering_l();
1554    }
1555}
1556
1557void NuPlayer::GenericSource::BufferingMonitor::stopBufferingIfNecessary() {
1558    Mutex::Autolock _l(mLock);
1559    stopBufferingIfNecessary_l();
1560}
1561
1562void NuPlayer::GenericSource::BufferingMonitor::ensureCacheIsFetching() {
1563    Mutex::Autolock _l(mLock);
1564    ensureCacheIsFetching_l();
1565}
1566
1567void NuPlayer::GenericSource::BufferingMonitor::updateQueuedTime(bool isAudio, int64_t timeUs) {
1568    Mutex::Autolock _l(mLock);
1569    if (isAudio) {
1570        mAudioTimeUs = timeUs;
1571    } else {
1572        mVideoTimeUs = timeUs;
1573    }
1574}
1575
1576void NuPlayer::GenericSource::BufferingMonitor::setOffloadAudio(bool offload) {
1577    Mutex::Autolock _l(mLock);
1578    mOffloadAudio = offload;
1579}
1580
1581void NuPlayer::GenericSource::BufferingMonitor::updateDequeuedBufferTime(int64_t mediaUs) {
1582    Mutex::Autolock _l(mLock);
1583    if (mediaUs < 0) {
1584        mFirstDequeuedBufferRealUs = -1ll;
1585        mFirstDequeuedBufferMediaUs = -1ll;
1586    } else if (mFirstDequeuedBufferRealUs < 0) {
1587        mFirstDequeuedBufferRealUs = ALooper::GetNowUs();
1588        mFirstDequeuedBufferMediaUs = mediaUs;
1589    }
1590    mlastDequeuedBufferMediaUs = mediaUs;
1591}
1592
1593void NuPlayer::GenericSource::BufferingMonitor::prepare_l(
1594        const sp<NuCachedSource2> &cachedSource,
1595        const sp<WVMExtractor> &wvmExtractor,
1596        int64_t durationUs,
1597        int64_t bitrate,
1598        bool isStreaming) {
1599    ALOGW_IF(wvmExtractor != NULL && cachedSource != NULL,
1600            "WVMExtractor and NuCachedSource are both present when "
1601            "BufferingMonitor::prepare_l is called, ignore NuCachedSource");
1602
1603    mCachedSource = cachedSource;
1604    mWVMExtractor = wvmExtractor;
1605    mDurationUs = durationUs;
1606    mBitrate = bitrate;
1607    mIsStreaming = isStreaming;
1608    mAudioTimeUs = 0;
1609    mVideoTimeUs = 0;
1610    mPrepareBuffering = (cachedSource != NULL || wvmExtractor != NULL);
1611    cancelPollBuffering_l();
1612    mOffloadAudio = false;
1613    mFirstDequeuedBufferRealUs = -1ll;
1614    mFirstDequeuedBufferMediaUs = -1ll;
1615    mlastDequeuedBufferMediaUs = -1ll;
1616}
1617
1618void NuPlayer::GenericSource::BufferingMonitor::cancelPollBuffering_l() {
1619    mBuffering = false;
1620    ++mPollBufferingGeneration;
1621    mPrevBufferPercentage = -1;
1622}
1623
1624void NuPlayer::GenericSource::BufferingMonitor::notifyBufferingUpdate_l(int32_t percentage) {
1625    // Buffering percent could go backward as it's estimated from remaining
1626    // data and last access time. This could cause the buffering position
1627    // drawn on media control to jitter slightly. Remember previously reported
1628    // percentage and don't allow it to go backward.
1629    if (percentage < mPrevBufferPercentage) {
1630        percentage = mPrevBufferPercentage;
1631    } else if (percentage > 100) {
1632        percentage = 100;
1633    }
1634
1635    mPrevBufferPercentage = percentage;
1636
1637    ALOGV("notifyBufferingUpdate_l: buffering %d%%", percentage);
1638
1639    sp<AMessage> msg = mNotify->dup();
1640    msg->setInt32("what", kWhatBufferingUpdate);
1641    msg->setInt32("percentage", percentage);
1642    msg->post();
1643}
1644
1645void NuPlayer::GenericSource::BufferingMonitor::startBufferingIfNecessary_l() {
1646    if (mPrepareBuffering) {
1647        return;
1648    }
1649
1650    if (!mBuffering) {
1651        ALOGD("startBufferingIfNecessary_l");
1652
1653        mBuffering = true;
1654
1655        ensureCacheIsFetching_l();
1656        sendCacheStats_l();
1657
1658        sp<AMessage> notify = mNotify->dup();
1659        notify->setInt32("what", kWhatPauseOnBufferingStart);
1660        notify->post();
1661    }
1662}
1663
1664void NuPlayer::GenericSource::BufferingMonitor::stopBufferingIfNecessary_l() {
1665    if (mPrepareBuffering) {
1666        ALOGD("stopBufferingIfNecessary_l, mBuffering=%d", mBuffering);
1667
1668        mPrepareBuffering = false;
1669
1670        sp<AMessage> notify = mNotify->dup();
1671        notify->setInt32("what", kWhatPrepared);
1672        notify->setInt32("err", OK);
1673        notify->post();
1674
1675        return;
1676    }
1677
1678    if (mBuffering) {
1679        ALOGD("stopBufferingIfNecessary_l");
1680        mBuffering = false;
1681
1682        sendCacheStats_l();
1683
1684        sp<AMessage> notify = mNotify->dup();
1685        notify->setInt32("what", kWhatResumeOnBufferingEnd);
1686        notify->post();
1687    }
1688}
1689
1690void NuPlayer::GenericSource::BufferingMonitor::sendCacheStats_l() {
1691    int32_t kbps = 0;
1692    status_t err = UNKNOWN_ERROR;
1693
1694    if (mWVMExtractor != NULL) {
1695        err = mWVMExtractor->getEstimatedBandwidthKbps(&kbps);
1696    } else if (mCachedSource != NULL) {
1697        err = mCachedSource->getEstimatedBandwidthKbps(&kbps);
1698    }
1699
1700    if (err == OK) {
1701        sp<AMessage> notify = mNotify->dup();
1702        notify->setInt32("what", kWhatCacheStats);
1703        notify->setInt32("bandwidth", kbps);
1704        notify->post();
1705    }
1706}
1707
1708void NuPlayer::GenericSource::BufferingMonitor::ensureCacheIsFetching_l() {
1709    if (mCachedSource != NULL) {
1710        mCachedSource->resumeFetchingIfNecessary();
1711    }
1712}
1713
1714void NuPlayer::GenericSource::BufferingMonitor::schedulePollBuffering_l() {
1715    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
1716    msg->setInt32("generation", mPollBufferingGeneration);
1717    // Enquires buffering status every second.
1718    msg->post(1000000ll);
1719}
1720
1721int64_t NuPlayer::GenericSource::BufferingMonitor::getLastReadPosition_l() {
1722    if (mAudioTimeUs > 0) {
1723        return mAudioTimeUs;
1724    } else if (mVideoTimeUs > 0) {
1725        return mVideoTimeUs;
1726    } else {
1727        return 0;
1728    }
1729}
1730
// One buffering poll cycle: estimates how much playable data is cached
// (as a duration when possible, otherwise raw bytes), emits percentage
// updates, and starts/stops buffering against the low/high watermarks.
// Reschedules itself via schedulePollBuffering_l() unless the stream
// reported a final (error/EOS) status.
void NuPlayer::GenericSource::BufferingMonitor::onPollBuffering_l() {
    status_t finalStatus = UNKNOWN_ERROR;
    int64_t cachedDurationUs = -1ll;
    ssize_t cachedDataRemaining = -1;

    if (mWVMExtractor != NULL) {
        // Widevine reports cached duration directly.
        cachedDurationUs =
                mWVMExtractor->getCachedDurationUs(&finalStatus);
    } else if (mCachedSource != NULL) {
        cachedDataRemaining =
                mCachedSource->approxDataRemaining(&finalStatus);

        if (finalStatus == OK) {
            // Convert cached bytes to a duration using the known size/duration
            // (preferred) or the stream's nominal bitrate.
            off64_t size;
            int64_t bitrate = 0ll;
            if (mDurationUs > 0 && mCachedSource->getSize(&size) == OK) {
                // |bitrate| uses bits/second unit, while size is number of bytes.
                bitrate = size * 8000000ll / mDurationUs;
            } else if (mBitrate > 0) {
                bitrate = mBitrate;
            }
            if (bitrate > 0) {
                cachedDurationUs = cachedDataRemaining * 8000000ll / bitrate;
            }
        }
    }

    if (finalStatus != OK) {
        // Stream ended (or errored): report 100% on clean EOS, stop
        // buffering, and do NOT reschedule — polling ends here.
        ALOGV("onPollBuffering_l: EOS (finalStatus = %d)", finalStatus);

        if (finalStatus == ERROR_END_OF_STREAM) {
            notifyBufferingUpdate_l(100);
        }

        stopBufferingIfNecessary_l();
        return;
    } else if (cachedDurationUs >= 0ll) {
        if (mDurationUs > 0ll) {
            // Percentage of the whole clip buffered up to (read pos + cache).
            int64_t cachedPosUs = getLastReadPosition_l() + cachedDurationUs;
            int percentage = 100.0 * cachedPosUs / mDurationUs;
            if (percentage > 100) {
                percentage = 100;
            }

            notifyBufferingUpdate_l(percentage);
        }

        ALOGV("onPollBuffering_l: cachedDurationUs %.1f sec",
                cachedDurationUs / 1000000.0f);

        if (cachedDurationUs < kLowWaterMarkUs) {
            // Take into account the data cached in downstream components to try to avoid
            // unnecessary pause.
            if (mOffloadAudio && mFirstDequeuedBufferRealUs >= 0) {
                int64_t downStreamCacheUs = mlastDequeuedBufferMediaUs - mFirstDequeuedBufferMediaUs
                        - (ALooper::GetNowUs() - mFirstDequeuedBufferRealUs);
                if (downStreamCacheUs > 0) {
                    cachedDurationUs += downStreamCacheUs;
                }
            }

            if (cachedDurationUs < kLowWaterMarkUs) {
                startBufferingIfNecessary_l();
            }
        } else {
            // Initial prepare uses the lower high watermark; rebuffering
            // waits for the larger one before resuming.
            int64_t highWaterMark = mPrepareBuffering ? kHighWaterMarkUs : kHighWaterMarkRebufferUs;
            if (cachedDurationUs > highWaterMark) {
                stopBufferingIfNecessary_l();
            }
        }
    } else if (cachedDataRemaining >= 0) {
        // No duration estimate available: fall back to byte watermarks.
        ALOGV("onPollBuffering_l: cachedDataRemaining %zd bytes",
                cachedDataRemaining);

        if (cachedDataRemaining < kLowWaterMarkBytes) {
            startBufferingIfNecessary_l();
        } else if (cachedDataRemaining > kHighWaterMarkBytes) {
            stopBufferingIfNecessary_l();
        }
    }

    schedulePollBuffering_l();
}
1814
1815void NuPlayer::GenericSource::BufferingMonitor::onMessageReceived(const sp<AMessage> &msg) {
1816    switch (msg->what()) {
1817      case kWhatPollBuffering:
1818      {
1819          int32_t generation;
1820          CHECK(msg->findInt32("generation", &generation));
1821          Mutex::Autolock _l(mLock);
1822          if (generation == mPollBufferingGeneration) {
1823              onPollBuffering_l();
1824          }
1825          break;
1826      }
1827      default:
1828          TRESPASS();
1829          break;
1830    }
1831}
1832
1833}  // namespace android
1834