VideoFrameScheduler.cpp revision 5d6fb5e41f57a71bd5b2902dc8334825de7bdcc0
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "VideoFrameScheduler"
#include <utils/Log.h>
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Trace.h>

#include <sys/time.h>

#include <binder/IServiceManager.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayStatInfo.h>

#include <media/stagefright/foundation/ADebug.h>

#include "VideoFrameScheduler.h"

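// Illustrative usage (a sketch only; the caller shown here is hypothetical, not
// part of this file):
//
//   sp<VideoFrameScheduler> scheduler = new VideoFrameScheduler;
//   scheduler->init(contentFps);                    // pass <= 0 if the frame rate is unknown
//   nsecs_t flipTimeNs = scheduler->schedule(renderTimeNs);  // once per frame
//   scheduler->restart();                           // after a seek or other discontinuity
//   scheduler->release();                           // drop the SurfaceFlinger reference
//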
namespace android {

static const nsecs_t kNanosIn1s = 1000000000;

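// Integer division rounded to the nearest value (rather than truncated toward
// zero); the sign test keeps the rounding symmetric when nom and den differ in sign.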
template<class T>
inline static const T divRound(const T &nom, const T &den) {
    if ((nom >= 0) ^ (den >= 0)) {
        return (nom - den / 2) / den;
    } else {
        return (nom + den / 2) / den;
    }
}

template<class T>
inline static T abs(const T &a) {
    return a < 0 ? -a : a;
}

template<class T>
inline static const T &min(const T &a, const T &b) {
    return a < b ? a : b;
}

template<class T>
inline static const T &max(const T &a, const T &b) {
    return a > b ? a : b;
}

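// Distance from val to the nearest integer multiple of period, i.e. how far a
// value is from being exactly "on period".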
template<class T>
inline static T periodicError(const T &val, const T &period) {
    T err = abs(val) % period;
    return (err < (period / 2)) ? err : (period - err);
}

template<class T>
static int compare(const T *lhs, const T *rhs) {
    if (*lhs < *rhs) {
        return -1;
    } else if (*lhs > *rhs) {
        return 1;
    } else {
        return 0;
    }
}

/* ======================================================================= */
/*                                   PLL                                   */
/* ======================================================================= */

static const size_t kMinSamplesToStartPrime = 3;
static const size_t kMinSamplesToStopPrime = VideoFrameScheduler::kHistorySize;
static const size_t kMinSamplesToEstimatePeriod = 3;
static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;

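// The PLL's line fit is done in fixed point: slopes and offsets carry kPrecision
// fractional bits, and squared-error terms carry 2 * kPrecision bits (see fit()).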
static const size_t kPrecision = 12;
static const size_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
static const int64_t kMultiplesThresholdDiv = 4;            // 25%
static const int64_t kReFitThresholdDiv = 100;              // 1%
static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s;     // 1 sec
static const nsecs_t kMinPeriod = kNanosIn1s / 120;         // 120Hz
static const nsecs_t kRefitRefreshPeriod = 10 * kNanosIn1s; // 10 sec

VideoFrameScheduler::PLL::PLL()
    : mPeriod(-1),
      mPhase(0),
      mPrimed(false),
      mSamplesUsedForPriming(0),
      mLastTime(-1),
      mRefitAt(0),  // otherwise addSample() reads an indeterminate value on its first call
      mNumSamples(0) {
}

void VideoFrameScheduler::PLL::reset(float fps) {
    //test();

    mSamplesUsedForPriming = 0;
    mLastTime = -1;

    // set up or reset video PLL
    if (fps <= 0.f) {
        mPeriod = -1;
        mPrimed = false;
    } else {
        ALOGV("reset at %.1f fps", fps);
        mPeriod = (nsecs_t)(1e9 / fps + 0.5);
        mPrimed = true;
    }

    restart();
}

// reset PLL but keep previous period estimate
void VideoFrameScheduler::PLL::restart() {
    mNumSamples = 0;
    mPhase = -1;
}

#if 0

void VideoFrameScheduler::PLL::test() {
    nsecs_t period = kNanosIn1s / 60;
    mTimes[0] = 0;
    mTimes[1] = period;
    mTimes[2] = period * 3;
    mTimes[3] = period * 4;
    mTimes[4] = period * 7;
    mTimes[5] = period * 8;
    mTimes[6] = period * 10;
    mTimes[7] = period * 12;
    mNumSamples = 8;
    int64_t a, b, err;
    fit(0, period * 12 / 7, 8, &a, &b, &err);
    // a = 0.8(5)+
    // b = -0.14097(2)+
    // err = 0.2750578(703)+
    ALOGD("a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
            (long long)a, (a / (float)(1 << kPrecision)),
            (long long)b, (b / (float)(1 << kPrecision)),
            (long long)err, (err / (float)(1 << (kPrecision * 2))));
}

#endif

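// Least-squares fit of a line y = a * x + b to the recent sample history, in
// fixed point.  x counts elapsed frame periods between samples (so skipped
// frames advance x by more than one), and y is the sample time relative to
// phase in units of (period >> kPrecision).  Returns false if the samples all
// fall on a single x value, in which case no fit is possible.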
bool VideoFrameScheduler::PLL::fit(
        nsecs_t phase, nsecs_t period, size_t numSamplesToUse,
        int64_t *a, int64_t *b, int64_t *err) {
    if (numSamplesToUse > mNumSamples) {
        numSamplesToUse = mNumSamples;
    }

    int64_t sumX = 0;
    int64_t sumXX = 0;
    int64_t sumXY = 0;
    int64_t sumYY = 0;
    int64_t sumY = 0;

    int64_t x = 0; // x usually is in [0..numSamplesToUse)
    nsecs_t lastTime;
    for (size_t i = 0; i < numSamplesToUse; i++) {
        size_t ix = (mNumSamples - numSamplesToUse + i) % kHistorySize;
        nsecs_t time = mTimes[ix];
        if (i > 0) {
            x += divRound(time - lastTime, period);
        }
        // y is usually in [-numSamplesToUse..numSamplesToUse+kRefitRefreshPeriod/kMinPeriod) << kPrecision
        //   ideally in [0..numSamplesToUse), but shifted by -numSamplesToUse during
        //   priming, and possibly shifted by up to kRefitRefreshPeriod/kMinPeriod
        //   while we are not refitting.
        int64_t y = divRound(time - phase, period >> kPrecision);
        sumX += x;
        sumY += y;
        sumXX += x * x;
        sumXY += x * y;
        sumYY += y * y;
        lastTime = time;
    }

    int64_t div   = numSamplesToUse * sumXX - sumX * sumX;
    if (div == 0) {
        return false;
    }

    int64_t a_nom = numSamplesToUse * sumXY - sumX * sumY;
    int64_t b_nom = sumXX * sumY            - sumX * sumXY;
    *a = divRound(a_nom, div);
    *b = divRound(b_nom, div);
    // don't use a and b directly as the rounding error is significant
    *err = sumYY - divRound(a_nom * sumXY + b_nom * sumY, div);
    ALOGV("fitting[%zu] a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
            numSamplesToUse,
            (long long)*a,   (*a / (float)(1 << kPrecision)),
            (long long)*b,   (*b / (float)(1 << kPrecision)),
            (long long)*err, (*err / (float)(1 << (kPrecision * 2))));
    return true;
}

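// Derive an initial period/phase estimate from the timestamps alone: collect
// the deltas between successive samples, sort them, drop outliers, and also
// consider second-order deltas (differences between sorted deltas).  Each
// candidate is scored by how many deltas are near-integer multiples of it; the
// best candidate becomes the period that fit() subsequently refines.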
void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) {
    if (numSamplesToUse > mNumSamples) {
        numSamplesToUse = mNumSamples;
    }
    CHECK(numSamplesToUse >= 3);  // must have at least 3 samples

    // estimate video framerate from deltas between timestamps, and
    // 2nd order deltas
    Vector<nsecs_t> deltas;
    nsecs_t lastTime, firstTime;
    for (size_t i = 0; i < numSamplesToUse; ++i) {
        size_t index = (mNumSamples - numSamplesToUse + i) % kHistorySize;
        nsecs_t time = mTimes[index];
        if (i > 0) {
            if (time - lastTime > kMinPeriod) {
                //ALOGV("delta: %lld", (long long)(time - lastTime));
                deltas.push(time - lastTime);
            }
        } else {
            firstTime = time;
        }
        lastTime = time;
    }
    deltas.sort(compare<nsecs_t>);
    size_t numDeltas = deltas.size();
    if (numDeltas > 1) {
        nsecs_t deltaMinLimit = max(deltas[0] / kMultiplesThresholdDiv, kMinPeriod);
        nsecs_t deltaMaxLimit = deltas[numDeltas / 2] * kMultiplesThresholdDiv;
        for (size_t i = numDeltas / 2 + 1; i < numDeltas; ++i) {
            if (deltas[i] > deltaMaxLimit) {
                deltas.resize(i);
                numDeltas = i;
                break;
            }
        }
        for (size_t i = 1; i < numDeltas; ++i) {
            nsecs_t delta2nd = deltas[i] - deltas[i - 1];
            if (delta2nd >= deltaMinLimit) {
                //ALOGV("delta2: %lld", (long long)(delta2nd));
                deltas.push(delta2nd);
            }
        }
    }

    // use the one that yields the best match
    int64_t bestScore;
    for (size_t i = 0; i < deltas.size(); ++i) {
        nsecs_t delta = deltas[i];
        int64_t score = 0;
#if 1
        // simplest score: number of deltas that are near multiples
        size_t matches = 0;
        for (size_t j = 0; j < deltas.size(); ++j) {
            nsecs_t err = periodicError(deltas[j], delta);
            if (err < delta / kMultiplesThresholdDiv) {
                ++matches;
            }
        }
        score = matches;
#if 0
        // could be weighted by the (1 - normalized error)
        if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
            int64_t a, b, err;
            fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
            err = (1 << (2 * kPrecision)) - err;
            score *= max(0, err);
        }
#endif
#else
        // or use the error as a negative score
        if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
            int64_t a, b, err;
            fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
            score = -delta * err;
        }
#endif
        if (i == 0 || score > bestScore) {
            bestScore = score;
            mPeriod = delta;
            mPhase = firstTime;
        }
    }
    ALOGV("priming[%zu] phase:%lld period:%lld",
            numSamplesToUse, (long long)mPhase, (long long)mPeriod);
}

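// Feed one render timestamp into the PLL and return the current period estimate
// (-1 if there is none yet).  The sample history is restarted when time jumps
// backward or skips ahead by more than kMaxAllowedFrameSkip; the line fit is
// redone when the phase error exceeds period / kReFitThresholdDiv or after
// kRefitRefreshPeriod has elapsed.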
nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
    if (mLastTime >= 0
            // if time goes backward, or we skipped rendering
            && (time > mLastTime + kMaxAllowedFrameSkip || time < mLastTime)) {
        restart();
    }

    mLastTime = time;
    mTimes[mNumSamples % kHistorySize] = time;
    ++mNumSamples;

    bool doFit = time > mRefitAt;
    if ((mPeriod <= 0 || !mPrimed) && mNumSamples >= kMinSamplesToStartPrime) {
        prime(kMinSamplesToStopPrime);
        ++mSamplesUsedForPriming;
        doFit = true;
    }
    if (mPeriod > 0 && mNumSamples >= kMinSamplesToEstimatePeriod) {
        if (mPhase < 0) {
            // initialize phase to the current render time
            mPhase = time;
            doFit = true;
        } else if (!doFit) {
            int64_t err = periodicError(time - mPhase, mPeriod);
            doFit = err > mPeriod / kReFitThresholdDiv;
        }

        if (doFit) {
            int64_t a, b, err;
            if (!fit(mPhase, mPeriod, kMaxSamplesToEstimatePeriod, &a, &b, &err)) {
                // samples are not suitable for fitting.  this means they are
                // also not suitable for priming.
                ALOGV("could not fit - keeping old period:%lld", (long long)mPeriod);
                return mPeriod;
            }

            mRefitAt = time + kRefitRefreshPeriod;

            mPhase += (mPeriod * b) >> kPrecision;
            mPeriod = (mPeriod * a) >> kPrecision;
            ALOGV("new phase:%lld period:%lld", (long long)mPhase, (long long)mPeriod);

            if (err < kErrorThreshold) {
                if (!mPrimed && mSamplesUsedForPriming >= kMinSamplesToStopPrime) {
                    mPrimed = true;
                }
            } else {
                mPrimed = false;
                mSamplesUsedForPriming = 0;
            }
        }
    }
    return mPeriod;
}

/* ======================================================================= */
/*                             Frame Scheduler                             */
/* ======================================================================= */

static const nsecs_t kDefaultVsyncPeriod = kNanosIn1s / 60;  // 60Hz
static const nsecs_t kVsyncRefreshPeriod = kNanosIn1s;       // 1 sec

VideoFrameScheduler::VideoFrameScheduler()
    : mVsyncTime(0),
      mVsyncPeriod(0),
      mVsyncRefreshAt(0),
      mLastVsyncTime(-1),
      mTimeCorrection(0) {
}

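// Refresh the cached VSYNC time and period from SurfaceFlinger's display stats.
// Called from init() and then at most once per kVsyncRefreshPeriod from
// schedule(); on failure both values stay 0 and schedule() passes render times
// through unadjusted.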
void VideoFrameScheduler::updateVsync() {
    mVsyncRefreshAt = systemTime(SYSTEM_TIME_MONOTONIC) + kVsyncRefreshPeriod;
    mVsyncPeriod = 0;
    mVsyncTime = 0;

    // TODO: schedule frames for the destination surface
    // For now, surface flinger only schedules frames on the primary display
    if (mComposer == NULL) {
        String16 name("SurfaceFlinger");
        sp<IServiceManager> sm = defaultServiceManager();
        mComposer = interface_cast<ISurfaceComposer>(sm->checkService(name));
    }
    if (mComposer != NULL) {
        DisplayStatInfo stats;
        status_t res = mComposer->getDisplayStats(NULL /* display */, &stats);
        if (res == OK) {
            ALOGV("vsync time:%lld period:%lld",
                    (long long)stats.vsyncTime, (long long)stats.vsyncPeriod);
            mVsyncTime = stats.vsyncTime;
            mVsyncPeriod = stats.vsyncPeriod;
        } else {
            ALOGW("getDisplayStats returned %d", res);
        }
    } else {
        ALOGW("could not get surface composer service");
    }
}

void VideoFrameScheduler::init(float videoFps) {
    updateVsync();

    mLastVsyncTime = -1;
    mTimeCorrection = 0;

    mPll.reset(videoFps);
}

void VideoFrameScheduler::restart() {
    mLastVsyncTime = -1;
    mTimeCorrection = 0;

    mPll.restart();
}

nsecs_t VideoFrameScheduler::getVsyncPeriod() {
    if (mVsyncPeriod > 0) {
        return mVsyncPeriod;
    }
    return kDefaultVsyncPeriod;
}

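// Map a desired render time onto the display's VSYNC grid and return the
// adjusted time, centered between VSYNC edges.  When the PLL has a period
// estimate, the average offset of the next N frames is used to smooth the
// cadence (with a longer window for the 5:6 case, e.g. 50Hz content on a 60Hz
// display), and a whole VSYNC is added or removed once the accumulated
// mTimeCorrection drifts past correctionLimit.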
nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {
    nsecs_t origRenderTime = renderTime;

    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
    if (now >= mVsyncRefreshAt) {
        updateVsync();
    }

    // without VSYNC info, there is nothing to do
    if (mVsyncPeriod == 0) {
        ALOGV("no vsync: render=%lld", (long long)renderTime);
        return renderTime;
    }

    // ensure vsync time is well before (corrected) render time
    if (mVsyncTime > renderTime - 4 * mVsyncPeriod) {
        mVsyncTime -=
            ((mVsyncTime - renderTime) / mVsyncPeriod + 5) * mVsyncPeriod;
    }

    // Video presentation takes place at the VSYNC _after_ renderTime.  Adjust renderTime
    // so this effectively becomes a rounding operation (to the _closest_ VSYNC.)
    renderTime -= mVsyncPeriod / 2;

    const nsecs_t videoPeriod = mPll.addSample(origRenderTime);
    if (videoPeriod > 0) {
        // Smooth out rendering
        size_t N = 12;
        nsecs_t fiveSixthDev =
            abs(((videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)) - mVsyncPeriod)
                    / (mVsyncPeriod / 100);
        // use 20 samples if we are doing 5:6 ratio +- 1% (e.g. playing 50Hz on 60Hz)
        if (fiveSixthDev < 12) {  /* 12% / 6 = 2% */
            N = 20;
        }

        nsecs_t offset = 0;
        nsecs_t edgeRemainder = 0;
        for (size_t i = 1; i <= N; i++) {
            offset +=
                (renderTime + mTimeCorrection + videoPeriod * i - mVsyncTime) % mVsyncPeriod;
            edgeRemainder += (videoPeriod * i) % mVsyncPeriod;
        }
        mTimeCorrection += mVsyncPeriod / 2 - offset / N;
        renderTime += mTimeCorrection;
        nsecs_t correctionLimit = mVsyncPeriod * 3 / 5;
        edgeRemainder = abs(edgeRemainder / N - mVsyncPeriod / 2);
        if (edgeRemainder <= mVsyncPeriod / 3) {
            correctionLimit /= 2;
        }

        // estimate how many VSYNCs a frame will spend on the display
        nsecs_t nextVsyncTime =
            renderTime + mVsyncPeriod - ((renderTime - mVsyncTime) % mVsyncPeriod);
        if (mLastVsyncTime >= 0) {
            size_t minVsyncsPerFrame = videoPeriod / mVsyncPeriod;
            size_t vsyncsForLastFrame = divRound(nextVsyncTime - mLastVsyncTime, mVsyncPeriod);
            bool vsyncsPerFrameAreNearlyConstant =
                periodicError(videoPeriod, mVsyncPeriod) / (mVsyncPeriod / 20) == 0;

            if (mTimeCorrection > correctionLimit &&
                    (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame > minVsyncsPerFrame)) {
                // remove a VSYNC
                mTimeCorrection -= mVsyncPeriod / 2;
                renderTime -= mVsyncPeriod / 2;
                nextVsyncTime -= mVsyncPeriod;
                --vsyncsForLastFrame;
            } else if (mTimeCorrection < -correctionLimit &&
                    (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame == minVsyncsPerFrame)) {
                // add a VSYNC
                mTimeCorrection += mVsyncPeriod / 2;
                renderTime += mVsyncPeriod / 2;
                nextVsyncTime += mVsyncPeriod;
                ++vsyncsForLastFrame;
            }
            ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame);
        }
        mLastVsyncTime = nextVsyncTime;
    }

    // align rendertime to the center between VSYNC edges
    renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod;
    renderTime += mVsyncPeriod / 2;
    ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime);
    ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
    return renderTime;
}

void VideoFrameScheduler::release() {
    mComposer.clear();
}

VideoFrameScheduler::~VideoFrameScheduler() {
    release();
}

} // namespace android