/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "VideoFrameScheduler"
#include <utils/Log.h>
#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Trace.h>

#include <sys/time.h>

#include <binder/IServiceManager.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayStatInfo.h>

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/VideoFrameScheduler.h>

namespace android {

static const nsecs_t kNanosIn1s = 1000000000;

template<class T>
static int compare(const T *lhs, const T *rhs) {
    if (*lhs < *rhs) {
        return -1;
    } else if (*lhs > *rhs) {
        return 1;
    } else {
        return 0;
    }
}

/* ======================================================================= */
/* PLL */
/* ======================================================================= */

static const size_t kMinSamplesToStartPrime = 3;
static const size_t kMinSamplesToStopPrime = VideoFrameScheduler::kHistorySize;
static const size_t kMinSamplesToEstimatePeriod = 3;
static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;

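// The PLL's linear fit works in fixed point: the fitted slope and intercept carry
// kPrecision (12) fractional bits, and squared-error terms carry 2 * kPrecision bits,
// so kErrorThreshold corresponds to a normalized squared error of 0.1.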
static const size_t kPrecision = 12;
static const int64_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
static const int64_t kMultiplesThresholdDiv = 4;            // 25%
static const int64_t kReFitThresholdDiv = 100;              // 1%
static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s;     // 1 sec
static const nsecs_t kMinPeriod = kNanosIn1s / 120;         // 120Hz
static const nsecs_t kRefitRefreshPeriod = 10 * kNanosIn1s; // 10 sec

VideoFrameScheduler::PLL::PLL()
    : mPeriod(-1),
      mPhase(0),
      mPrimed(false),
      mSamplesUsedForPriming(0),
      mLastTime(-1),
      mRefitAt(0),  // initialize the refit deadline so the first addSample() does not read garbage
      mNumSamples(0) {
}

void VideoFrameScheduler::PLL::reset(float fps) {
    //test();

    mSamplesUsedForPriming = 0;
    mLastTime = -1;

    // set up or reset video PLL
    if (fps <= 0.f) {
        mPeriod = -1;
        mPrimed = false;
    } else {
        ALOGV("reset at %.1f fps", fps);
        mPeriod = (nsecs_t)(1e9 / fps + 0.5);
        mPrimed = true;
    }

    restart();
}

// reset PLL but keep previous period estimate
void VideoFrameScheduler::PLL::restart() {
    mNumSamples = 0;
    mPhase = -1;
}

#if 0

void VideoFrameScheduler::PLL::test() {
    nsecs_t period = kNanosIn1s / 60;
    mTimes[0] = 0;
    mTimes[1] = period;
    mTimes[2] = period * 3;
    mTimes[3] = period * 4;
    mTimes[4] = period * 7;
    mTimes[5] = period * 8;
    mTimes[6] = period * 10;
    mTimes[7] = period * 12;
    mNumSamples = 8;
    int64_t a, b, err;
    fit(0, period * 12 / 7, 8, &a, &b, &err);
    // a = 0.8(5)+
    // b = -0.14097(2)+
    // err = 0.2750578(703)+
    ALOGD("a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
            (long long)a, (a / (float)(1 << kPrecision)),
            (long long)b, (b / (float)(1 << kPrecision)),
            (long long)err, (err / (float)(1 << (kPrecision * 2))));
}

#endif

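// Least-squares fit of the stored render times against expected frame indices, using
// the given phase/period hypothesis. On success, *a is the fitted slope and *b the
// fitted intercept (both with kPrecision fractional bits, relative to the hypothesized
// period and phase), and *err is the residual error of the fit.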
bool VideoFrameScheduler::PLL::fit(
        nsecs_t phase, nsecs_t period, size_t numSamplesToUse,
        int64_t *a, int64_t *b, int64_t *err) {
    if (numSamplesToUse > mNumSamples) {
        numSamplesToUse = mNumSamples;
    }

    int64_t sumX = 0;
    int64_t sumXX = 0;
    int64_t sumXY = 0;
    int64_t sumYY = 0;
    int64_t sumY = 0;

    int64_t x = 0; // x usually is in [0..numSamplesToUse)
    nsecs_t lastTime = 0;  // only read when i > 0; initialized to avoid uninitialized-use warnings
    for (size_t i = 0; i < numSamplesToUse; i++) {
        size_t ix = (mNumSamples - numSamplesToUse + i) % kHistorySize;
        nsecs_t time = mTimes[ix];
        if (i > 0) {
            x += divRound(time - lastTime, period);
        }
        // y is usually in [-numSamplesToUse..numSamplesToUse+kRefitRefreshPeriod/kMinPeriod) << kPrecision
        //   ideally in [0..numSamplesToUse), but shifted by -numSamplesToUse during
        //   priming, and possibly shifted by up to kRefitRefreshPeriod/kMinPeriod
        //   while we are not refitting.
        int64_t y = divRound(time - phase, period >> kPrecision);
        sumX += x;
        sumY += y;
        sumXX += x * x;
        sumXY += x * y;
        sumYY += y * y;
        lastTime = time;
    }

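    // Standard closed-form least squares over n = numSamplesToUse points:
    //   a = (n*sumXY - sumX*sumY) / (n*sumXX - sumX^2)
    //   b = (sumXX*sumY - sumX*sumXY) / (n*sumXX - sumX^2)
    // The residual below is computed from the unrounded numerators to preserve precision.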
    int64_t div = numSamplesToUse * sumXX - sumX * sumX;
    if (div == 0) {
        return false;
    }

    int64_t a_nom = numSamplesToUse * sumXY - sumX * sumY;
    int64_t b_nom = sumXX * sumY - sumX * sumXY;
    *a = divRound(a_nom, div);
    *b = divRound(b_nom, div);
    // don't use a and b directly as the rounding error is significant
    *err = sumYY - divRound(a_nom * sumXY + b_nom * sumY, div);
    ALOGV("fitting[%zu] a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
            numSamplesToUse,
            (long long)*a, (*a / (float)(1 << kPrecision)),
            (long long)*b, (*b / (float)(1 << kPrecision)),
            (long long)*err, (*err / (float)(1 << (kPrecision * 2))));
    return true;
}

void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) {
    if (numSamplesToUse > mNumSamples) {
        numSamplesToUse = mNumSamples;
    }
    CHECK(numSamplesToUse >= 3);  // must have at least 3 samples

    // estimate video framerate from deltas between timestamps, and
    // 2nd order deltas
    Vector<nsecs_t> deltas;
    nsecs_t lastTime = 0, firstTime = 0;  // both set on the first loop iteration; initialized for static analysis
    for (size_t i = 0; i < numSamplesToUse; ++i) {
        size_t index = (mNumSamples - numSamplesToUse + i) % kHistorySize;
        nsecs_t time = mTimes[index];
        if (i > 0) {
            if (time - lastTime > kMinPeriod) {
                //ALOGV("delta: %lld", (long long)(time - lastTime));
                deltas.push(time - lastTime);
            }
        } else {
            firstTime = time;
        }
        lastTime = time;
    }
    deltas.sort(compare<nsecs_t>);
    size_t numDeltas = deltas.size();
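    // Discard outlier deltas larger than 4x the median (e.g. long pauses), then add
    // 2nd-order deltas (differences between successive sorted deltas) as extra period
    // candidates, so deltas spanning dropped frames do not pull the estimate toward a
    // multiple of the true period.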
    if (numDeltas > 1) {
        nsecs_t deltaMinLimit = max(deltas[0] / kMultiplesThresholdDiv, kMinPeriod);
        nsecs_t deltaMaxLimit = deltas[numDeltas / 2] * kMultiplesThresholdDiv;
        for (size_t i = numDeltas / 2 + 1; i < numDeltas; ++i) {
            if (deltas[i] > deltaMaxLimit) {
                deltas.resize(i);
                numDeltas = i;
                break;
            }
        }
        for (size_t i = 1; i < numDeltas; ++i) {
            nsecs_t delta2nd = deltas[i] - deltas[i - 1];
            if (delta2nd >= deltaMinLimit) {
                //ALOGV("delta2: %lld", (long long)(delta2nd));
                deltas.push(delta2nd);
            }
        }
    }

    // use the one that yields the best match
    int64_t bestScore;
    for (size_t i = 0; i < deltas.size(); ++i) {
        nsecs_t delta = deltas[i];
        int64_t score = 0;
#if 1
        // simplest score: number of deltas that are near multiples
        size_t matches = 0;
        for (size_t j = 0; j < deltas.size(); ++j) {
            nsecs_t err = periodicError(deltas[j], delta);
            if (err < delta / kMultiplesThresholdDiv) {
                ++matches;
            }
        }
        score = matches;
#if 0
        // could be weighted by (1 - normalized error)
        if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
            int64_t a, b, err;
            fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
            err = (1 << (2 * kPrecision)) - err;
            score *= max(0, err);
        }
#endif
#else
        // or use the error as a negative score
        if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
            int64_t a, b, err;
            fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
            score = -delta * err;
        }
#endif
        if (i == 0 || score > bestScore) {
            bestScore = score;
            mPeriod = delta;
            mPhase = firstTime;
        }
    }
    ALOGV("priming[%zu] phase:%lld period:%lld",
            numSamplesToUse, (long long)mPhase, (long long)mPeriod);
}

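// Feed one render timestamp into the PLL. Returns the current period estimate (in
// nanoseconds), or the previous estimate unchanged if the samples could not be fitted.
// A gap of more than kMaxAllowedFrameSkip, or time running backwards, restarts the
// sample history.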
nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
    if (mLastTime >= 0
            // if time goes backward, or we skipped rendering
            && (time > mLastTime + kMaxAllowedFrameSkip || time < mLastTime)) {
        restart();
    }

    mLastTime = time;
    mTimes[mNumSamples % kHistorySize] = time;
    ++mNumSamples;

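    // Refit when the scheduled refit time has passed, while still priming, or (below)
    // when the phase error of this sample exceeds 1% of the period.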
    bool doFit = time > mRefitAt;
    if ((mPeriod <= 0 || !mPrimed) && mNumSamples >= kMinSamplesToStartPrime) {
        prime(kMinSamplesToStopPrime);
        ++mSamplesUsedForPriming;
        doFit = true;
    }
    if (mPeriod > 0 && mNumSamples >= kMinSamplesToEstimatePeriod) {
        if (mPhase < 0) {
            // initialize phase to the current render time
            mPhase = time;
            doFit = true;
        } else if (!doFit) {
            int64_t err = periodicError(time - mPhase, mPeriod);
            doFit = err > mPeriod / kReFitThresholdDiv;
        }

        if (doFit) {
            int64_t a, b, err;
            if (!fit(mPhase, mPeriod, kMaxSamplesToEstimatePeriod, &a, &b, &err)) {
                // samples are not suitable for fitting.  this means they are
                // also not suitable for priming.
                ALOGV("could not fit - keeping old period:%lld", (long long)mPeriod);
                return mPeriod;
            }

            mRefitAt = time + kRefitRefreshPeriod;

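            // a and b carry kPrecision fractional bits: scale the period by the fitted
            // slope and shift the phase by the fitted intercept (measured in periods).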
            mPhase += (mPeriod * b) >> kPrecision;
            mPeriod = (mPeriod * a) >> kPrecision;
            ALOGV("new phase:%lld period:%lld", (long long)mPhase, (long long)mPeriod);

            if (err < kErrorThreshold) {
                if (!mPrimed && mSamplesUsedForPriming >= kMinSamplesToStopPrime) {
                    mPrimed = true;
                }
            } else {
                mPrimed = false;
                mSamplesUsedForPriming = 0;
            }
        }
    }
    return mPeriod;
}

nsecs_t VideoFrameScheduler::PLL::getPeriod() const {
    return mPrimed ? mPeriod : 0;
}

/* ======================================================================= */
/* Frame Scheduler */
/* ======================================================================= */

static const nsecs_t kDefaultVsyncPeriod = kNanosIn1s / 60;  // 60Hz
static const nsecs_t kVsyncRefreshPeriod = kNanosIn1s;       // 1 sec

VideoFrameScheduler::VideoFrameScheduler()
    : mVsyncTime(0),
      mVsyncPeriod(0),
      mVsyncRefreshAt(0),
      mLastVsyncTime(-1),
      mTimeCorrection(0) {
}

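// Query SurfaceFlinger for the latest VSYNC timestamp and period of the primary
// display. schedule() refreshes this cached info once per kVsyncRefreshPeriod.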
void VideoFrameScheduler::updateVsync() {
    mVsyncRefreshAt = systemTime(SYSTEM_TIME_MONOTONIC) + kVsyncRefreshPeriod;
    mVsyncPeriod = 0;
    mVsyncTime = 0;

    // TODO: schedule frames for the destination surface
    // For now, surface flinger only schedules frames on the primary display
    if (mComposer == NULL) {
        String16 name("SurfaceFlinger");
        sp<IServiceManager> sm = defaultServiceManager();
        mComposer = interface_cast<ISurfaceComposer>(sm->checkService(name));
    }
    if (mComposer != NULL) {
        DisplayStatInfo stats;
        status_t res = mComposer->getDisplayStats(NULL /* display */, &stats);
        if (res == OK) {
            ALOGV("vsync time:%lld period:%lld",
                    (long long)stats.vsyncTime, (long long)stats.vsyncPeriod);
            mVsyncTime = stats.vsyncTime;
            mVsyncPeriod = stats.vsyncPeriod;
        } else {
            ALOGW("getDisplayStats returned %d", res);
        }
    } else {
        ALOGW("could not get surface composer service");
    }
}

void VideoFrameScheduler::init(float videoFps) {
    updateVsync();

    mLastVsyncTime = -1;
    mTimeCorrection = 0;

    mPll.reset(videoFps);
}

void VideoFrameScheduler::restart() {
    mLastVsyncTime = -1;
    mTimeCorrection = 0;

    mPll.restart();
}

nsecs_t VideoFrameScheduler::getVsyncPeriod() {
    if (mVsyncPeriod > 0) {
        return mVsyncPeriod;
    }
    return kDefaultVsyncPeriod;
}

float VideoFrameScheduler::getFrameRate() {
    nsecs_t videoPeriod = mPll.getPeriod();
    if (videoPeriod > 0) {
        return 1e9 / videoPeriod;
    }
    return 0.f;
}

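// Given the desired render time of a frame (monotonic clock), return an adjusted render
// time that falls mid-way between two VSYNC edges, smoothed over the upcoming frames
// using the PLL's period estimate. Each call also feeds the original render time into
// the PLL as a new sample.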
nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {
    nsecs_t origRenderTime = renderTime;

    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
    if (now >= mVsyncRefreshAt) {
        updateVsync();
    }

    // without VSYNC info, there is nothing to do
    if (mVsyncPeriod == 0) {
        ALOGV("no vsync: render=%lld", (long long)renderTime);
        return renderTime;
    }

    // ensure vsync time is well before (corrected) render time
    if (mVsyncTime > renderTime - 4 * mVsyncPeriod) {
        mVsyncTime -=
            ((mVsyncTime - renderTime) / mVsyncPeriod + 5) * mVsyncPeriod;
    }

    // Video presentation takes place at the VSYNC _after_ renderTime.  Adjust renderTime
    // so this effectively becomes a rounding operation (to the _closest_ VSYNC.)
    renderTime -= mVsyncPeriod / 2;

    const nsecs_t videoPeriod = mPll.addSample(origRenderTime);
    if (videoPeriod > 0) {
        // Smooth out rendering
        size_t N = 12;
        nsecs_t fiveSixthDev =
            abs(((videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)) - mVsyncPeriod)
                    / (mVsyncPeriod / 100);
        // use 20 samples if we are doing 5:6 ratio +- 1% (e.g. playing 50Hz on 60Hz)
        if (fiveSixthDev < 12) {  /* 12% / 6 = 2% */
            N = 20;
        }

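        // Average, over the next N frames, how far each would land past a VSYNC edge,
        // and nudge mTimeCorrection so that frames land, on average, half a VSYNC
        // period past the edge. edgeRemainder gauges how the ideal frame boundaries
        // line up with VSYNC edges and is used below to tighten the correction limit.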
        nsecs_t offset = 0;
        nsecs_t edgeRemainder = 0;
        for (size_t i = 1; i <= N; i++) {
            offset +=
                (renderTime + mTimeCorrection + videoPeriod * i - mVsyncTime) % mVsyncPeriod;
            edgeRemainder += (videoPeriod * i) % mVsyncPeriod;
        }
        mTimeCorrection += mVsyncPeriod / 2 - offset / N;
        renderTime += mTimeCorrection;
        nsecs_t correctionLimit = mVsyncPeriod * 3 / 5;
        edgeRemainder = abs(edgeRemainder / N - mVsyncPeriod / 2);
        if (edgeRemainder <= mVsyncPeriod / 3) {
            correctionLimit /= 2;
        }

        // estimate how many VSYNCs a frame will spend on the display
        nsecs_t nextVsyncTime =
            renderTime + mVsyncPeriod - ((renderTime - mVsyncTime) % mVsyncPeriod);
        if (mLastVsyncTime >= 0) {
            size_t minVsyncsPerFrame = videoPeriod / mVsyncPeriod;
            size_t vsyncsForLastFrame = divRound(nextVsyncTime - mLastVsyncTime, mVsyncPeriod);
            bool vsyncsPerFrameAreNearlyConstant =
                periodicError(videoPeriod, mVsyncPeriod) / (mVsyncPeriod / 20) == 0;

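            // If the accumulated correction drifts past the limit, show the frame one
            // VSYNC earlier or later instead: renderTime is moved half a VSYNC period so
            // the alignment below rounds it to the adjacent VSYNC, and mTimeCorrection
            // is adjusted by the same amount so later frames do not immediately drift back.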
            if (mTimeCorrection > correctionLimit &&
                    (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame > minVsyncsPerFrame)) {
                // remove a VSYNC
                mTimeCorrection -= mVsyncPeriod / 2;
                renderTime -= mVsyncPeriod / 2;
                nextVsyncTime -= mVsyncPeriod;
                --vsyncsForLastFrame;
            } else if (mTimeCorrection < -correctionLimit &&
                    (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame == minVsyncsPerFrame)) {
                // add a VSYNC
                mTimeCorrection += mVsyncPeriod / 2;
                renderTime += mVsyncPeriod / 2;
                nextVsyncTime += mVsyncPeriod;
                ++vsyncsForLastFrame;
            }
            ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame);
        }
        mLastVsyncTime = nextVsyncTime;
    }

    // align rendertime to the center between VSYNC edges
    renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod;
    renderTime += mVsyncPeriod / 2;
    ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime);
    ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
    return renderTime;
}

void VideoFrameScheduler::release() {
    mComposer.clear();
}

VideoFrameScheduler::~VideoFrameScheduler() {
    release();
}

}  // namespace android