1 /*
2  * Copyright (C) 2023 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "VideoRenderQualityTracker"
18 #define ATRACE_TAG ATRACE_TAG_VIDEO
19 
20 #include <utils/Log.h>
21 #include <utils/Trace.h>
22 #include <utils/Mutex.h>
23 
24 #include <media/stagefright/VideoRenderQualityTracker.h>
25 
26 #include <assert.h>
27 #include <charconv>
28 #include <cmath>
29 #include <stdio.h>
30 #include <sys/time.h>
31 #include <sys/wait.h>
32 
33 #include <android-base/macros.h>
34 #include <android-base/parsebool.h>
35 #include <android-base/parseint.h>
36 #include <android-base/properties.h>
37 
38 namespace android {
39 
40 using android::base::ParseBoolResult;
41 
42 static constexpr float FRAME_RATE_UNDETERMINED = VideoRenderQualityMetrics::FRAME_RATE_UNDETERMINED;
43 static constexpr float FRAME_RATE_24_3_2_PULLDOWN =
44         VideoRenderQualityMetrics::FRAME_RATE_24_3_2_PULLDOWN;
45 
46 typedef VideoRenderQualityTracker::Configuration::GetServerConfigurableFlagFn
47         GetServerConfigurableFlagFn;
48 typedef VideoRenderQualityTracker::TraceTriggerFn TraceTriggerFn;
49 
getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,char const * flagNameSuffix,bool * value)50 static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
51                                       char const *flagNameSuffix, bool *value) {
52     std::string flagName("render_metrics_");
53     flagName.append(flagNameSuffix);
54     std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName,
55                                                           *value ? "true" : "false");
56     switch (android::base::ParseBool(valueStr)) {
57     case ParseBoolResult::kTrue: *value = true; break;
58     case ParseBoolResult::kFalse: *value = false; break;
59     case ParseBoolResult::kError:
60         ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
61               valueStr.c_str());
62         break;
63     }
64 }
65 
getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,char const * flagNameSuffix,int32_t * value)66 static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
67                                       char const *flagNameSuffix, int32_t *value) {
68     char defaultStr[11];
69     sprintf(defaultStr, "%d", int(*value));
70     std::string flagName("render_metrics_");
71     flagName.append(flagNameSuffix);
72     std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, defaultStr);
73     if (!android::base::ParseInt(valueStr.c_str(), value) || valueStr.size() == 0) {
74         ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
75               valueStr.c_str());
76         return;
77     }
78 }
79 
80 template<typename T>
getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,char const * flagNameSuffix,std::vector<T> * value)81 static void getServerConfigurableFlag(GetServerConfigurableFlagFn getServerConfigurableFlagFn,
82                                       char const *flagNameSuffix, std::vector<T> *value) {
83     std::stringstream sstr;
84     for (int i = 0; i < value->size(); ++i) {
85         if (i != 0) {
86             sstr << ",";
87         }
88         sstr << (*value)[i];
89     }
90     std::string flagName("render_metrics_");
91     flagName.append(flagNameSuffix);
92     std::string valueStr = (*getServerConfigurableFlagFn)("media_native", flagName, sstr.str());
93     if (valueStr.size() == 0) {
94         return;
95     }
96     // note: using android::base::Tokenize fails to catch parsing failures for values ending in ','
97     std::vector<T> newValues;
98     const char *p = valueStr.c_str();
99     const char *last = p + valueStr.size();
100     while (p != last) {
101         if (*p == ',') {
102             p++;
103         }
104         T value = -1;
105         auto [ptr, error] = std::from_chars(p, last, value);
106         if (error == std::errc::invalid_argument || error == std::errc::result_out_of_range) {
107             ALOGW("failed to parse server-configurable flag '%s' from '%s'", flagNameSuffix,
108                   valueStr.c_str());
109             return;
110         }
111         p = ptr;
112         newValues.push_back(value);
113     }
114     *value = std::move(newValues);
115 }
116 
// Constructs metrics with all fields reset to their initial "no data" values.
VideoRenderQualityMetrics::VideoRenderQualityMetrics() {
    clear();
}
120 
clear()121 void VideoRenderQualityMetrics::clear() {
122     firstRenderTimeUs = 0;
123     frameReleasedCount = 0;
124     frameRenderedCount = 0;
125     frameDroppedCount = 0;
126     frameSkippedCount = 0;
127     contentFrameRate = FRAME_RATE_UNDETERMINED;
128     desiredFrameRate = FRAME_RATE_UNDETERMINED;
129     actualFrameRate = FRAME_RATE_UNDETERMINED;
130     maxContentDroppedAfterPauseMs = 0;
131     freezeEventCount = 0;
132     freezeDurationMsHistogram.clear();
133     freezeDistanceMsHistogram.clear();
134     judderEventCount = 0;
135     judderScoreHistogram.clear();
136 }
137 
// Builds a Configuration from server-configurable flags, starting from the
// compiled-in defaults and overriding each field whose flag value parses cleanly.
VideoRenderQualityTracker::Configuration
        VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
            GetServerConfigurableFlagFn getServerConfigurableFlagFn) {
    VideoRenderQualityTracker::Configuration c;
// Dispatches to the getServerConfigurableFlag() overload matching the field type.
#define getFlag(FIELDNAME, FLAGNAME) \
    getServerConfigurableFlag(getServerConfigurableFlagFn, FLAGNAME, &c.FIELDNAME)
    getFlag(enabled, "enabled");
    getFlag(areSkippedFramesDropped, "are_skipped_frames_dropped");
    getFlag(maxExpectedContentFrameDurationUs, "max_expected_content_frame_duration_us");
    getFlag(frameRateDetectionToleranceUs, "frame_rate_detection_tolerance_us");
    getFlag(liveContentFrameDropToleranceUs, "live_content_frame_drop_tolerance_us");
    getFlag(pauseAudioLatencyUs, "pause_audio_latency_us");
    getFlag(freezeDurationMsHistogramBuckets, "freeze_duration_ms_histogram_buckets");
    getFlag(freezeDurationMsHistogramToScore, "freeze_duration_ms_histogram_to_score");
    getFlag(freezeDistanceMsHistogramBuckets, "freeze_distance_ms_histogram_buckets");
    getFlag(freezeEventMax, "freeze_event_max");
    getFlag(freezeEventDetailsMax, "freeze_event_details_max");
    getFlag(freezeEventDistanceToleranceMs, "freeze_event_distance_tolerance_ms");
    getFlag(judderErrorToleranceUs, "judder_error_tolerance_us");
    getFlag(judderScoreHistogramBuckets, "judder_score_histogram_buckets");
    getFlag(judderScoreHistogramToScore, "judder_score_histogram_to_score");
    getFlag(judderEventMax, "judder_event_max");
    getFlag(judderEventDetailsMax, "judder_event_details_max");
    getFlag(judderEventDistanceToleranceMs, "judder_event_distance_tolerance_ms");
    getFlag(traceTriggerEnabled, "trace_trigger_enabled");
    getFlag(traceTriggerThrottleMs, "trace_trigger_throttle_ms");
    getFlag(traceMinFreezeDurationMs, "trace_minimum_freeze_duration_ms");
#undef getFlag
    return c;
}
168 
// Compiled-in defaults. Server-side overrides are applied by
// getFromServerConfigurableFlags().
VideoRenderQualityTracker::Configuration::Configuration() {
    // Tracking is off by default; a server flag (or explicit config) enables it.
    enabled = false;

    // Assume that the app is skipping frames because it's detected that the frame couldn't be
    // rendered in time.
    areSkippedFramesDropped = true;

    // 400ms is 8 frames at 20 frames per second and 24 frames at 60 frames per second
    maxExpectedContentFrameDurationUs = 400 * 1000;

    // Allow for 2 milliseconds of deviation when detecting frame rates
    frameRateDetectionToleranceUs = 2 * 1000;

    // Allow for a tolerance of 200 milliseconds for determining if we moved forward in content time
    // because of frame drops for live content, or because the user is seeking.
    liveContentFrameDropToleranceUs = 200 * 1000;

    // After a pause is initiated, audio should likely stop playback within 200ms.
    pauseAudioLatencyUs = 200 * 1000;

    // Freeze configuration
    freezeDurationMsHistogramBuckets = {1, 20, 40, 60, 80, 100, 120, 150, 175, 225, 300, 400, 500};
    freezeDurationMsHistogramToScore = {1,  1,  1,  1,  1,   1,   1,   1,   1,   1,   1,   1,   1};
    freezeDistanceMsHistogramBuckets = {0, 20, 100, 400, 1000, 2000, 3000, 4000, 8000, 15000, 30000,
                                        60000};
    freezeEventMax = 0; // enabled only when debugging
    freezeEventDetailsMax = 20;
    freezeEventDistanceToleranceMs = 60000; // lump freeze occurrences together when 60s or less

    // Judder configuration
    judderErrorToleranceUs = 2000;
    judderScoreHistogramBuckets = {1, 4, 5, 9, 11, 20, 30, 40, 50, 60, 70, 80};
    judderScoreHistogramToScore = {1, 1, 1, 1,  1,  1,  1,  1,  1,  1,  1,  1};
    judderEventMax = 0; // enabled only when debugging
    judderEventDetailsMax = 20;
    judderEventDistanceToleranceMs = 5000; // lump judder occurrences together when 5s or less

    // Perfetto trigger configuration.
    traceTriggerEnabled = android::base::GetProperty(
        "ro.build.type", "user") != "user"; // Enabled for non-user builds for debugging.
    traceTriggerThrottleMs = 5 * 60 * 1000; // 5 mins.
    traceMinFreezeDurationMs = 400;
}
212 
// Default-constructs the tracker with the compiled-in configuration and the
// default Perfetto trace-trigger function.
VideoRenderQualityTracker::VideoRenderQualityTracker()
    : mConfiguration(Configuration()), mTraceTriggerFn(triggerTrace) {
    configureHistograms(mMetrics, mConfiguration);
    clear();
}
218 
// Constructs the tracker with an explicit configuration. A null traceTriggerFn
// falls back to the default triggerTrace implementation.
VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration,
                                                     const TraceTriggerFn traceTriggerFn)
    : mConfiguration(configuration),
      mTraceTriggerFn(traceTriggerFn == nullptr ? triggerTrace : traceTriggerFn) {
    configureHistograms(mMetrics, mConfiguration);
    clear();
}
226 
onTunnelFrameQueued(int64_t contentTimeUs)227 void VideoRenderQualityTracker::onTunnelFrameQueued(int64_t contentTimeUs) {
228     if (!mConfiguration.enabled) {
229         return;
230     }
231 
232     // Since P-frames are queued out of order, hold onto the P-frame until we can track it in
233     // render order. This only works because it depends on today's encoding algorithms that only
234     // allow B-frames to refer to ONE P-frame that comes after it. If the cardinality of P-frames
235     // in a single mini-GOP is increased, this algorithm breaks down.
236     if (mTunnelFrameQueuedContentTimeUs == -1) {
237         mTunnelFrameQueuedContentTimeUs = contentTimeUs;
238     } else if (contentTimeUs < mTunnelFrameQueuedContentTimeUs) {
239         onFrameReleased(contentTimeUs, 0);
240     } else {
241         onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
242         mTunnelFrameQueuedContentTimeUs = contentTimeUs;
243     }
244 }
245 
onFrameSkipped(int64_t contentTimeUs)246 void VideoRenderQualityTracker::onFrameSkipped(int64_t contentTimeUs) {
247     if (!mConfiguration.enabled) {
248         return;
249     }
250 
251     // Frames skipped at the beginning shouldn't really be counted as skipped frames, since the
252     // app might be seeking to a starting point that isn't the first key frame.
253     if (mLastRenderTimeUs == -1) {
254         return;
255     }
256 
257     resetIfDiscontinuity(contentTimeUs, -1);
258 
259     if (mTraceFrameSkippedToken == -1) {
260        mTraceFrameSkippedToken = contentTimeUs;
261        ATRACE_ASYNC_BEGIN("Video frame(s) skipped", mTraceFrameSkippedToken);
262     }
263 
264     // Frames skipped at the end of playback shouldn't be counted as skipped frames, since the
265     // app could be terminating the playback. The pending count will be added to the metrics if and
266     // when the next frame is rendered.
267     mPendingSkippedFrameContentTimeUsList.push_back(contentTimeUs);
268 }
269 
// Convenience overload for a frame released for immediate rendering. nowUs()
// is in microseconds; scale to the nanoseconds the two-argument overload takes.
void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs) {
    onFrameReleased(contentTimeUs, nowUs() * 1000);
}
273 
// Called when a frame is released to the display subsystem with the app's
// desired render time (the tunnel-queue path passes 0 here).
void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs,
                                                int64_t desiredRenderTimeNs) {
    if (!mConfiguration.enabled) {
        return;
    }

    int64_t desiredRenderTimeUs = desiredRenderTimeNs / 1000;
    // A large jump in content time (seek/loop) resets tracking state first.
    resetIfDiscontinuity(contentTimeUs, desiredRenderTimeUs);
    mMetrics.frameReleasedCount++;
    // Released frames are expected to be rendered (or dropped) in this order.
    mNextExpectedRenderedFrameQueue.push({contentTimeUs, desiredRenderTimeUs});
    mLastContentTimeUs = contentTimeUs;
}
286 
// Called when a frame is reported as rendered. Reconciles the rendered frame
// against the expected-frame queue (counting intermediate frames as dropped and
// folding in pending skipped frames), then updates freeze/judder metrics.
void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs,
                                                FreezeEvent *freezeEventOut,
                                                JudderEvent *judderEventOut) {
    if (!mConfiguration.enabled) {
        return;
    }

    // A rendered frame closes any open "frames skipped" trace section.
    if (mTraceFrameSkippedToken != -1) {
        ATRACE_ASYNC_END("Video frame(s) skipped", mTraceFrameSkippedToken);
        mTraceFrameSkippedToken = -1;
    }

    int64_t actualRenderTimeUs = actualRenderTimeNs / 1000;

    // Accumulate wall-clock playback duration between consecutive renders.
    if (mLastRenderTimeUs != -1) {
        mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
    }

    // We can render a pending queued frame if it's the last frame of the video, so release it
    // immediately.
    if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
        onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
        mTunnelFrameQueuedContentTimeUs = -1;
    }

    static const FrameInfo noFrame = {-1, -1};
    FrameInfo nextExpectedFrame = noFrame;
    while (!mNextExpectedRenderedFrameQueue.empty()) {
        nextExpectedFrame = mNextExpectedRenderedFrameQueue.front();
        mNextExpectedRenderedFrameQueue.pop();
        // Happy path - the rendered frame is what we expected it to be
        if (contentTimeUs == nextExpectedFrame.contentTimeUs) {
            break;
        }
        // This isn't really supposed to happen - the next rendered frame should be the expected
        // frame, or, if there's frame drops, it will be a frame later in the content stream
        if (contentTimeUs < nextExpectedFrame.contentTimeUs) {
            ALOGW("Rendered frame is earlier than the next expected frame (%lld, %lld)",
                  (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
            break;
        }
        // Process all skipped frames before the dropped frame.
        while (!mPendingSkippedFrameContentTimeUsList.empty()) {
            if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
                break;
            }
            processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
            mPendingSkippedFrameContentTimeUsList.pop_front();
        }
        // Any expected frame passed over before the rendered one was dropped.
        processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
                                      nextExpectedFrame.desiredRenderTimeUs);
    }
    // Process all skipped frames before the rendered frame.
    while (!mPendingSkippedFrameContentTimeUsList.empty()) {
        if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
            break;
        }
        processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
        mPendingSkippedFrameContentTimeUsList.pop_front();
    }
    // nextExpectedFrame may still be {-1, -1} if the queue was empty; the callee
    // treats a -1 content time as "render after discontinuity" and only counts it.
    processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
                                   nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
                                   freezeEventOut, judderEventOut);
    mLastRenderTimeUs = actualRenderTimeUs;
}
352 
getAndResetFreezeEvent()353 VideoRenderQualityTracker::FreezeEvent VideoRenderQualityTracker::getAndResetFreezeEvent() {
354     FreezeEvent event = std::move(mFreezeEvent);
355     mFreezeEvent.valid = false;
356     return event;
357 }
358 
getAndResetJudderEvent()359 VideoRenderQualityTracker::JudderEvent VideoRenderQualityTracker::getAndResetJudderEvent() {
360     JudderEvent event = std::move(mJudderEvent);
361     mJudderEvent.valid = false;
362     return event;
363 }
364 
getMetrics()365 const VideoRenderQualityMetrics &VideoRenderQualityTracker::getMetrics() {
366     if (!mConfiguration.enabled) {
367         return mMetrics;
368     }
369 
370     mMetrics.freezeScore = 0;
371     if (mConfiguration.freezeDurationMsHistogramToScore.size() ==
372         mMetrics.freezeDurationMsHistogram.size()) {
373         for (int i = 0; i < mMetrics.freezeDurationMsHistogram.size(); ++i) {
374             mMetrics.freezeScore += mMetrics.freezeDurationMsHistogram[i] *
375                     mConfiguration.freezeDurationMsHistogramToScore[i];
376         }
377     }
378     mMetrics.freezeRate = float(double(mMetrics.freezeDurationMsHistogram.getSum()) /
379             mRenderDurationMs);
380 
381     mMetrics.judderScore = 0;
382     if (mConfiguration.judderScoreHistogramToScore.size() == mMetrics.judderScoreHistogram.size()) {
383         for (int i = 0; i < mMetrics.judderScoreHistogram.size(); ++i) {
384             mMetrics.judderScore += mMetrics.judderScoreHistogram[i] *
385                     mConfiguration.judderScoreHistogramToScore[i];
386         }
387     }
388     mMetrics.judderRate = float(double(mMetrics.judderScoreHistogram.getCount()) /
389             (mMetrics.frameReleasedCount + mMetrics.frameSkippedCount));
390 
391     return mMetrics;
392 }
393 
clear()394 void VideoRenderQualityTracker::clear() {
395     mRenderDurationMs = 0;
396     mMetrics.clear();
397     resetForDiscontinuity();
398 }
399 
// Resets all per-stream tracking state after a discontinuity (seek, loop, or
// large content-time jump), without touching the accumulated metrics.
void VideoRenderQualityTracker::resetForDiscontinuity() {
    mLastContentTimeUs = -1;
    mLastRenderTimeUs = -1;
    mLastFreezeEndTimeUs = -1;
    mLastJudderEndTimeUs = -1;
    mDroppedContentDurationUs = 0;
    mFreezeEvent.valid = false;
    mJudderEvent.valid = false;

    // Don't worry about tracking frame rendering times from now up until playback catches up to
    // the discontinuity. While stuttering or freezing could be found in the next few frames, the
    // impact to the user is minimal, so better to just keep things simple and don't bother.
    mNextExpectedRenderedFrameQueue = {};
    mTunnelFrameQueuedContentTimeUs = -1;

    // Ignore any frames that were skipped just prior to the discontinuity.
    mPendingSkippedFrameContentTimeUsList = {};

    // All frame durations can be now ignored since all bets are off now on what the render
    // durations should be after the discontinuity.
    for (int i = 0; i < FrameDurationUs::SIZE; ++i) {
        mActualFrameDurationUs[i] = -1;
        mDesiredFrameDurationUs[i] = -1;
        mContentFrameDurationUs[i] = -1;
    }
    mActualFrameDurationUs.priorTimestampUs = -1;
    mDesiredFrameDurationUs.priorTimestampUs = -1;
    mContentFrameDurationUs.priorTimestampUs = -1;
}
429 
// Detects a discontinuity in content time (first frame, backward jump, or an
// unexpectedly large forward jump) and resets tracking state when one is found.
// Returns true if a reset occurred.
bool VideoRenderQualityTracker::resetIfDiscontinuity(int64_t contentTimeUs,
                                                     int64_t desiredRenderTimeUs) {
    // No prior frame tracked yet - treat the first frame as a discontinuity.
    if (mLastContentTimeUs == -1) {
        resetForDiscontinuity();
        return true;
    }
    // Backward jump: the user sought backwards or playback looped.
    if (contentTimeUs < mLastContentTimeUs) {
        ALOGI("Video playback jumped %d ms backwards in content time (%d -> %d)",
              int((mLastContentTimeUs - contentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
              int(contentTimeUs / 1000));
        resetForDiscontinuity();
        return true;
    }
    if (contentTimeUs - mLastContentTimeUs > mConfiguration.maxExpectedContentFrameDurationUs) {
        // The content frame duration could be long due to frame drops for live content. This can be
        // detected by looking at the app's desired rendering duration. If the app's rendered frame
        // duration is roughly the same as the content's frame duration, then it is assumed that
        // the forward discontinuity is due to frame drops for live content. A false positive can
        // occur if the time the user spends seeking is equal to the duration of the seek. This is
        // very unlikely to occur in practice but CAN occur - the user starts seeking forward, gets
        // distracted, and then returns to seeking forward.
        bool skippedForwardDueToLiveContentFrameDrops = false;
        if (desiredRenderTimeUs != -1) {
            int64_t contentFrameDurationUs = contentTimeUs - mLastContentTimeUs;
            int64_t desiredFrameDurationUs = desiredRenderTimeUs - mLastRenderTimeUs;
            skippedForwardDueToLiveContentFrameDrops =
                    abs(contentFrameDurationUs - desiredFrameDurationUs) <
                    mConfiguration.liveContentFrameDropToleranceUs;
        }
        if (!skippedForwardDueToLiveContentFrameDrops) {
            ALOGI("Video playback jumped %d ms forward in content time (%d -> %d) ",
                int((contentTimeUs - mLastContentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
                int(contentTimeUs / 1000));
            resetForDiscontinuity();
            return true;
        }
    }
    return false;
}
469 
// Accounts for a single skipped frame in the metrics. When configured, skipped
// frames are treated as drops and delegated to the dropped-frame path.
void VideoRenderQualityTracker::processMetricsForSkippedFrame(int64_t contentTimeUs) {
    mMetrics.frameSkippedCount++;
    if (mConfiguration.areSkippedFramesDropped) {
        processMetricsForDroppedFrame(contentTimeUs, -1);
        return;
    }
    // A skipped frame advances content time but has no desired or actual render time.
    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
    updateFrameDurations(mDesiredFrameDurationUs, -1);
    updateFrameDurations(mActualFrameDurationUs, -1);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
    // Skips are intentional, so discard any pending dropped-content accumulation.
    mDroppedContentDurationUs = 0;
}
482 
// Accounts for a single dropped frame in the metrics and accumulates the
// dropped content duration used later for freeze/pause discrimination.
void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTimeUs,
                                                              int64_t desiredRenderTimeUs) {
    mMetrics.frameDroppedCount++;
    // A dropped frame has content and desired render times but no actual render time.
    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
    updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
    updateFrameDurations(mActualFrameDurationUs, -1);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
    // -1 means the duration is unknown (e.g. first frame after a discontinuity).
    if (mContentFrameDurationUs[0] != -1) {
        mDroppedContentDurationUs += mContentFrameDurationUs[0];
    }
}
495 
// Accounts for a single rendered frame: updates timestamps, frame rates, and
// the freeze and judder metrics/events derived from the recent frame history.
void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTimeUs,
                                                               int64_t desiredRenderTimeUs,
                                                               int64_t actualRenderTimeUs,
                                                               FreezeEvent *freezeEventOut,
                                                               JudderEvent *judderEventOut) {
    const Configuration& c = mConfiguration;

    // Capture the timestamp at which the first frame was rendered
    if (mMetrics.firstRenderTimeUs == 0) {
        mMetrics.firstRenderTimeUs = actualRenderTimeUs;
    }
    // Capture the timestamp at which the last frame was rendered
    mMetrics.lastRenderTimeUs = actualRenderTimeUs;

    mMetrics.frameRenderedCount++;

    // The content time is -1 when it was rendered after a discontinuity (e.g. seek) was detected.
    // So, even though a frame was rendered, it's impact on the user is insignificant, so don't do
    // anything other than count it as a rendered frame.
    if (contentTimeUs == -1) {
        return;
    }
    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
    updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
    updateFrameDurations(mActualFrameDurationUs, actualRenderTimeUs);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
    updateFrameRate(mMetrics.actualFrameRate, mActualFrameDurationUs, mConfiguration);

    // A freeze occurs if frames were dropped NOT after a discontinuity
    if (mDroppedContentDurationUs != 0 && mLastRenderTimeUs != -1) {
        // When pausing, audio playback may continue for a brief period of time after video
        // pauses while the audio buffers drain. When resuming, a small number of video frames
        // might be dropped to catch up to the audio position. This is acceptable behavior and
        // should not count as a freeze.
        bool isLikelyCatchingUpAfterPause = false;
        // A pause can be detected if a freeze occurs for a longer period of time than the
        // content duration of the dropped frames. This strategy works because, for freeze
        // events (no video pause), the content duration of the dropped frames will closely track
        // the wall clock time (freeze duration). When pausing, however, the wall clock time
        // (freeze duration) will be longer than the content duration of the dropped frames
        // required to catch up to the audio position.
        const int64_t wallClockDurationUs = actualRenderTimeUs - mLastRenderTimeUs;
        // 200ms is chosen because it is larger than what a hiccup in the display pipeline could
        // likely be, but shorter than the duration for which a user could pause for.
        static const int32_t MAX_PIPELINE_HICCUP_DURATION_US = 200 * 1000;
        if (wallClockDurationUs > mDroppedContentDurationUs + MAX_PIPELINE_HICCUP_DURATION_US) {
            // Capture the amount of content that is dropped after pause, so we can push apps to be
            // better about this behavior.
            if (mDroppedContentDurationUs / 1000 > mMetrics.maxContentDroppedAfterPauseMs) {
                mMetrics.maxContentDroppedAfterPauseMs = int32_t(mDroppedContentDurationUs / 1000);
            }
            isLikelyCatchingUpAfterPause = mDroppedContentDurationUs <= c.pauseAudioLatencyUs;
        }
        if (!isLikelyCatchingUpAfterPause) {
            processFreeze(actualRenderTimeUs, mLastRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent,
                        mMetrics, mConfiguration, mTraceTriggerFn);
            mLastFreezeEndTimeUs = actualRenderTimeUs;
        }
    }
    maybeCaptureFreezeEvent(actualRenderTimeUs, mLastFreezeEndTimeUs, mFreezeEvent, mMetrics,
                            mConfiguration, freezeEventOut);

    // Judder is computed on the prior video frame, not the current video frame
    int64_t judderScore = computePreviousJudderScore(mActualFrameDurationUs,
                                                     mContentFrameDurationUs,
                                                     mConfiguration);
    if (judderScore != 0) {
        // The judder occurred at the start of the previous frame's display interval.
        int64_t judderTimeUs = actualRenderTimeUs - mActualFrameDurationUs[0] -
                mActualFrameDurationUs[1];
        processJudder(judderScore, judderTimeUs, mLastJudderEndTimeUs, mActualFrameDurationUs,
                      mContentFrameDurationUs, mJudderEvent, mMetrics, mConfiguration);
        mLastJudderEndTimeUs = judderTimeUs + mActualFrameDurationUs[1];
    }
    maybeCaptureJudderEvent(actualRenderTimeUs, mLastJudderEndTimeUs, mJudderEvent, mMetrics,
                            mConfiguration, judderEventOut);

    mDroppedContentDurationUs = 0;
}
575 
// Records a freeze (the span from lastRenderTimeUs to actualRenderTimeUs) into
// the histograms, accumulates it into the in-progress freeze event when event
// capture is enabled, and fires a trace trigger for long freezes.
void VideoRenderQualityTracker::processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
                                              int64_t lastFreezeEndTimeUs, FreezeEvent &e,
                                              VideoRenderQualityMetrics &m, const Configuration &c,
                                              const TraceTriggerFn traceTriggerFn) {
    int32_t durationMs = int32_t((actualRenderTimeUs - lastRenderTimeUs) / 1000);
    m.freezeDurationMsHistogram.insert(durationMs);
    int32_t distanceMs = -1;
    if (lastFreezeEndTimeUs != -1) {
        // The distance to the last freeze is measured from the end of the last freeze to the start
        // of this freeze.
        distanceMs = int32_t((lastRenderTimeUs - lastFreezeEndTimeUs) / 1000);
        m.freezeDistanceMsHistogram.insert(distanceMs);
    }
    if (c.freezeEventMax > 0) {
        if (e.valid == false) {
            // Start a new freeze event.
            m.freezeEventCount++;
            e.valid = true;
            e.initialTimeUs = lastRenderTimeUs;
            e.durationMs = 0;
            e.sumDurationMs = 0;
            e.sumDistanceMs = 0;
            e.count = 0;
            e.details.durationMs.clear();
            e.details.distanceMs.clear();
        // The first occurrence in the event should not have the distance recorded as part of the
        // event, because it belongs in a vacuum between two events. However we still want the
        // distance recorded in the details to calculate times in all details in all events.
        } else if (distanceMs != -1) {
            e.durationMs += distanceMs;
            e.sumDistanceMs += distanceMs;
        }
        e.durationMs += durationMs;
        e.count++;
        e.sumDurationMs += durationMs;
        // Cap the per-event detail lists at the configured maximum.
        if (e.details.durationMs.size() < c.freezeEventDetailsMax) {
            e.details.durationMs.push_back(durationMs);
            e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
        }
    }

    // Long freezes optionally trigger a (throttled) Perfetto trace capture.
    if (c.traceTriggerEnabled && durationMs >= c.traceMinFreezeDurationMs) {
        ALOGI("Video freezed %lld ms", (long long) durationMs);
        triggerTraceWithThrottle(traceTriggerFn, c, actualRenderTimeUs);
    }
}
621 
maybeCaptureFreezeEvent(int64_t actualRenderTimeUs,int64_t lastFreezeEndTimeUs,FreezeEvent & e,const VideoRenderQualityMetrics & m,const Configuration & c,FreezeEvent * freezeEventOut)622 void VideoRenderQualityTracker::maybeCaptureFreezeEvent(int64_t actualRenderTimeUs,
623                                                         int64_t lastFreezeEndTimeUs, FreezeEvent &e,
624                                                         const VideoRenderQualityMetrics & m,
625                                                         const Configuration &c,
626                                                         FreezeEvent *freezeEventOut) {
627     if (lastFreezeEndTimeUs == -1 || !e.valid) {
628         return;
629     }
630     // Future freeze occurrences are still pulled into the current freeze event if under tolerance
631     int64_t distanceMs = (actualRenderTimeUs - lastFreezeEndTimeUs) / 1000;
632     if (distanceMs < c.freezeEventDistanceToleranceMs) {
633         return;
634     }
635     if (freezeEventOut != nullptr && m.freezeEventCount <= c.freezeEventMax) {
636         *freezeEventOut = std::move(e);
637     }
638     // start recording a new freeze event after pushing the current one back to the caller
639     e.valid = false;
640 }
641 
computePreviousJudderScore(const FrameDurationUs & actualFrameDurationUs,const FrameDurationUs & contentFrameDurationUs,const Configuration & c)642 int64_t VideoRenderQualityTracker::computePreviousJudderScore(
643         const FrameDurationUs &actualFrameDurationUs,
644         const FrameDurationUs &contentFrameDurationUs,
645         const Configuration &c) {
646     // If the frame before or after was dropped, then don't generate a judder score, since any
647     // problems with frame drops are scored as a freeze instead.
648     if (actualFrameDurationUs[0] == -1 || actualFrameDurationUs[1] == -1 ||
649         actualFrameDurationUs[2] == -1) {
650         return 0;
651     }
652 
653     // Don't score judder for when playback is paused or rebuffering (long frame duration), or if
654     // the player is intentionally playing each frame at a slow rate (e.g. half-rate). If the long
655     // frame duration was unintentional, it is assumed that this will be coupled with a later frame
656     // drop, and be scored as a freeze instead of judder.
657     if (actualFrameDurationUs[1] >= 2 * contentFrameDurationUs[1]) {
658         return 0;
659     }
660 
661     // The judder score is based on the error of this frame
662     int64_t errorUs = actualFrameDurationUs[1] - contentFrameDurationUs[1];
663     // Don't score judder if the previous frame has high error, but this frame has low error
664     if (abs(errorUs) < c.judderErrorToleranceUs) {
665         return 0;
666     }
667 
668     // Add a penalty if this frame has judder that amplifies the problem introduced by previous
669     // judder, instead of catching up for the previous judder (50, 16, 16, 50) vs (50, 16, 50, 16)
670     int64_t previousErrorUs = actualFrameDurationUs[2] - contentFrameDurationUs[2];
671     // Don't add the pentalty for errors from the previous frame if the previous frame has low error
672     if (abs(previousErrorUs) >= c.judderErrorToleranceUs) {
673         errorUs = abs(errorUs) + abs(errorUs + previousErrorUs);
674     }
675 
676     // Avoid scoring judder for 3:2 pulldown or other minimally-small frame duration errors
677     if (abs(errorUs) < contentFrameDurationUs[1] / 4) {
678         return 0;
679     }
680 
681     return abs(errorUs) / 1000; // error in millis to keep numbers small
682 }
683 
processJudder(int32_t judderScore,int64_t judderTimeUs,int64_t lastJudderEndTime,const FrameDurationUs & actualDurationUs,const FrameDurationUs & contentDurationUs,JudderEvent & e,VideoRenderQualityMetrics & m,const Configuration & c)684 void VideoRenderQualityTracker::processJudder(int32_t judderScore, int64_t judderTimeUs,
685                                               int64_t lastJudderEndTime,
686                                               const FrameDurationUs &actualDurationUs,
687                                               const FrameDurationUs &contentDurationUs,
688                                               JudderEvent &e, VideoRenderQualityMetrics &m,
689                                               const Configuration &c) {
690     int32_t distanceMs = -1;
691     if (lastJudderEndTime != -1) {
692         distanceMs = int32_t((judderTimeUs - lastJudderEndTime) / 1000);
693     }
694     m.judderScoreHistogram.insert(judderScore);
695     if (c.judderEventMax > 0) {
696         if (!e.valid) {
697             m.judderEventCount++;
698             e.valid = true;
699             e.initialTimeUs = judderTimeUs;
700             e.durationMs = 0;
701             e.sumScore = 0;
702             e.sumDistanceMs = 0;
703             e.count = 0;
704             e.details.contentRenderDurationUs.clear();
705             e.details.actualRenderDurationUs.clear();
706             e.details.distanceMs.clear();
707         // The first occurrence in the event should not have the distance recorded as part of the
708         // event, because it belongs in a vacuum between two events. However we still want the
709         // distance recorded in the details to calculate the times using all details in all events.
710         } else if (distanceMs != -1) {
711             e.durationMs += distanceMs;
712             e.sumDistanceMs += distanceMs;
713         }
714         e.durationMs += actualDurationUs[1] / 1000;
715         e.count++;
716         e.sumScore += judderScore;
717         if (e.details.contentRenderDurationUs.size() < c.judderEventDetailsMax) {
718             e.details.actualRenderDurationUs.push_back(actualDurationUs[1]);
719             e.details.contentRenderDurationUs.push_back(contentDurationUs[1]);
720             e.details.distanceMs.push_back(distanceMs); // -1 for first detail in the first event
721         }
722     }
723 }
724 
maybeCaptureJudderEvent(int64_t actualRenderTimeUs,int64_t lastJudderEndTimeUs,JudderEvent & e,const VideoRenderQualityMetrics & m,const Configuration & c,JudderEvent * judderEventOut)725 void VideoRenderQualityTracker::maybeCaptureJudderEvent(int64_t actualRenderTimeUs,
726                                                         int64_t lastJudderEndTimeUs, JudderEvent &e,
727                                                         const VideoRenderQualityMetrics &m,
728                                                         const Configuration &c,
729                                                         JudderEvent *judderEventOut) {
730     if (lastJudderEndTimeUs == -1 || !e.valid) {
731         return;
732     }
733     // Future judder occurrences are still pulled into the current judder event if under tolerance
734     int64_t distanceMs = (actualRenderTimeUs - lastJudderEndTimeUs) / 1000;
735     if (distanceMs < c.judderEventDistanceToleranceMs) {
736         return;
737     }
738     if (judderEventOut != nullptr && m.judderEventCount <= c.judderEventMax) {
739         *judderEventOut = std::move(e);
740     }
741     // start recording a new judder event after pushing the current one back to the caller
742     e.valid = false;
743 }
744 
configureHistograms(VideoRenderQualityMetrics & m,const Configuration & c)745 void VideoRenderQualityTracker::configureHistograms(VideoRenderQualityMetrics &m,
746                                                     const Configuration &c) {
747     m.freezeDurationMsHistogram.setup(c.freezeDurationMsHistogramBuckets);
748     m.freezeDistanceMsHistogram.setup(c.freezeDistanceMsHistogramBuckets);
749     m.judderScoreHistogram.setup(c.judderScoreHistogramBuckets);
750 }
751 
nowUs()752 int64_t VideoRenderQualityTracker::nowUs() {
753     struct timespec t;
754     t.tv_sec = t.tv_nsec = 0;
755     clock_gettime(CLOCK_MONOTONIC, &t);
756     return (t.tv_sec * 1000000000LL + t.tv_nsec) / 1000LL;
757 }
758 
updateFrameDurations(FrameDurationUs & durationUs,int64_t newTimestampUs)759 void VideoRenderQualityTracker::updateFrameDurations(FrameDurationUs &durationUs,
760                                                      int64_t newTimestampUs) {
761     for (int i = FrameDurationUs::SIZE - 1; i > 0; --i ) {
762         durationUs[i] = durationUs[i - 1];
763     }
764     if (newTimestampUs == -1) {
765         durationUs[0] = -1;
766     } else {
767         durationUs[0] = durationUs.priorTimestampUs == -1 ? -1 :
768                 newTimestampUs - durationUs.priorTimestampUs;
769         durationUs.priorTimestampUs = newTimestampUs;
770     }
771 }
772 
updateFrameRate(float & frameRate,const FrameDurationUs & durationUs,const Configuration & c)773 void VideoRenderQualityTracker::updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
774                                                 const Configuration &c) {
775     float newFrameRate = detectFrameRate(durationUs, c);
776     if (newFrameRate != FRAME_RATE_UNDETERMINED) {
777         frameRate = newFrameRate;
778     }
779 }
780 
detectFrameRate(const FrameDurationUs & durationUs,const Configuration & c)781 float VideoRenderQualityTracker::detectFrameRate(const FrameDurationUs &durationUs,
782                                                  const Configuration &c) {
783     // At least 3 frames are necessary to detect stable frame rates
784     assert(FrameDurationUs::SIZE >= 3);
785     if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1) {
786         return FRAME_RATE_UNDETERMINED;
787     }
788     // Only determine frame rate if the render durations are stable across 3 frames
789     if (abs(durationUs[0] - durationUs[1]) > c.frameRateDetectionToleranceUs ||
790         abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs) {
791         return is32pulldown(durationUs, c) ? FRAME_RATE_24_3_2_PULLDOWN : FRAME_RATE_UNDETERMINED;
792     }
793     return 1000.0 * 1000.0 / durationUs[0];
794 }
795 
is32pulldown(const FrameDurationUs & durationUs,const Configuration & c)796 bool VideoRenderQualityTracker::is32pulldown(const FrameDurationUs &durationUs,
797                                              const Configuration &c) {
798     // At least 5 frames are necessary to detect stable 3:2 pulldown
799     assert(FrameDurationUs::SIZE >= 5);
800     if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1 || durationUs[3] == -1 ||
801         durationUs[4] == -1) {
802         return false;
803     }
804     // 3:2 pulldown expects that every other frame has identical duration...
805     if (abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs ||
806         abs(durationUs[1] - durationUs[3]) > c.frameRateDetectionToleranceUs ||
807         abs(durationUs[0] - durationUs[4]) > c.frameRateDetectionToleranceUs) {
808         return false;
809     }
810     // ... for either 2 vsysncs or 3 vsyncs
811     if ((abs(durationUs[0] - 33333) < c.frameRateDetectionToleranceUs &&
812          abs(durationUs[1] - 50000) < c.frameRateDetectionToleranceUs) ||
813         (abs(durationUs[0] - 50000) < c.frameRateDetectionToleranceUs &&
814          abs(durationUs[1] - 33333) < c.frameRateDetectionToleranceUs)) {
815         return true;
816     }
817     return false;
818 }
819 
triggerTraceWithThrottle(const TraceTriggerFn traceTriggerFn,const Configuration & c,const int64_t triggerTimeUs)820 void VideoRenderQualityTracker::triggerTraceWithThrottle(const TraceTriggerFn traceTriggerFn,
821                                                          const Configuration &c,
822                                                          const int64_t triggerTimeUs) {
823     static int64_t lastTriggerUs = -1;
824     static Mutex updateLastTriggerLock;
825 
826     {
827         Mutex::Autolock autoLock(updateLastTriggerLock);
828         if (lastTriggerUs != -1) {
829             int32_t sinceLastTriggerMs = int32_t((triggerTimeUs - lastTriggerUs) / 1000);
830             // Throttle the trace trigger calls to reduce continuous PID fork calls in a short time
831             // to impact device performance, and reduce spamming trace reports.
832             if (sinceLastTriggerMs < c.traceTriggerThrottleMs) {
833                 ALOGI("Not triggering trace - not enough time since last trigger");
834                 return;
835             }
836         }
837         lastTriggerUs = triggerTimeUs;
838     }
839 
840     (*traceTriggerFn)();
841 }
842 
// Forks and execs trigger_perfetto so always-on-tracing (AOT) flushes a trace capture for a
// video freeze event, then reaps the child so it does not linger as a zombie.
void VideoRenderQualityTracker::triggerTrace() {
    // Trigger perfetto to stop always-on-tracing (AOT) to collect trace into a file for video
    // freeze event, the collected trace categories are configured by AOT.
    static const char* args[] = {"/system/bin/trigger_perfetto",
                                 "com.android.codec-video-freeze", NULL};

    pid_t pid = fork();
    if (pid < 0) {
        // Fork failed (e.g. resource limits); the trace is simply skipped.
        ALOGI("Failed to fork for triggering trace");
    } else if (pid == 0) {
        // Child process.
        // NOTE(review): calling ALOGI between fork() and exec() is not async-signal-safe in a
        // multithreaded process (logging may allocate or take locks held by another thread at
        // fork time) — presumably acceptable here, but worth confirming.
        ALOGI("Trigger trace %s", args[1]);
        execvp(args[0], const_cast<char**>(args));
        // Only reached if execvp failed; _exit (not exit) avoids running the parent's
        // atexit/stdio teardown in the child.
        ALOGW("Failed to trigger trace %s", args[1]);
        _exit(1);
    } else {
        // Parent process.
        int status;
        // Wait for the child process (pid) gets terminated, and allow the system to release
        // the resource associated with the child. Or the child process will remain in a
        // zombie state and get killed by llkd to cause foreground app crash.
        if (waitpid(pid, &status, 0) < 0) {
            ALOGW("Failed to waitpid for triggering trace");
        }
    }
}
869 
870 } // namespace android
871