/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.perftests.utils;

import android.annotation.IntDef;
import android.app.Activity;
import android.app.Instrumentation;
import android.os.Bundle;
import android.util.ArrayMap;
import android.util.Log;

import com.android.internal.util.ArrayUtils;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Provides a benchmark framework.
 *
 * This differs from BenchmarkState in that rather than the class measuring the elapsed time,
 * the test passes in the elapsed time.
 *
 * Example usage:
 *
 * public void sampleMethod() {
 *     ManualBenchmarkState state = new ManualBenchmarkState();
 *
 *     int[] src = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
 *     long elapsedTime = 0;
 *     while (state.keepRunning(elapsedTime)) {
 *         long startTime = System.nanoTime();
 *         int[] dest = new int[src.length];
 *         System.arraycopy(src, 0, dest, 0, src.length);
 *         elapsedTime = System.nanoTime() - startTime;
 *     }
 *     System.out.println(state.summaryLine());
 * }
 *
 * Or use the PerfManualStatusReporter TestRule.
 *
 * Make sure that the overhead of checking the clock does not noticeably affect the results.
 */
public final class ManualBenchmarkState {
    private static final String TAG = ManualBenchmarkState.class.getSimpleName();

    @Target(ElementType.ANNOTATION_TYPE)
    @Retention(RetentionPolicy.RUNTIME)
    public @interface StatsReport {
        int FLAG_MEDIAN = 0x00000001;
        int FLAG_MEAN = 0x00000002;
        int FLAG_MIN = 0x00000004;
        int FLAG_MAX = 0x00000008;
        int FLAG_STDDEV = 0x00000010;
        int FLAG_COEFFICIENT_VAR = 0x00000020;
        int FLAG_ITERATION = 0x00000040;

        @Retention(RetentionPolicy.RUNTIME)
        @IntDef(value = {
                FLAG_MEDIAN,
                FLAG_MEAN,
                FLAG_MIN,
                FLAG_MAX,
                FLAG_STDDEV,
                FLAG_COEFFICIENT_VAR,
                FLAG_ITERATION,
        })
        @interface Flag {}

        /** Defines which types of statistics should be output. */
        @Flag int flags() default -1;

        /** An array of values in the range 0~100 specifying which percentiles to report. */
        int[] percentiles() default {};
    }
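    // Illustrative sketch (not part of the original file): a test method could tailor the
    // reported statistics through the ManualBenchmarkTest annotation declared at the end of
    // this class. The test method name below is hypothetical; any combination of flags and
    // percentiles is allowed.
    //
    //     @ManualBenchmarkTest(statsReport = @StatsReport(
    //             flags = StatsReport.FLAG_MEDIAN | StatsReport.FLAG_MAX
    //                     | StatsReport.FLAG_ITERATION,
    //             percentiles = {90, 99}))
    //     public void timeSomething() { ... }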
    /** The interface to receive the events of customized iteration. */
    public interface CustomizedIterationListener {
        /** The customized iteration starts. */
        void onStart(int iteration);

        /** The customized iteration has finished. */
        void onFinished(int iteration);
    }

    /** Indicates that the entire {@link StatsReport} is not given. */
    private static final int DEFAULT_STATS_REPORT = -2;

    // TODO: Tune these values.
    // Duration to warm up for.
    private static final long WARMUP_DURATION_NS = TimeUnit.SECONDS.toNanos(5);
    // Minimum number of iterations to warm up for.
    private static final int WARMUP_MIN_ITERATIONS = 8;

    // Target duration of the test run.
    private static final long TARGET_TEST_DURATION_NS = TimeUnit.SECONDS.toNanos(16);
    private static final int MAX_TEST_ITERATIONS = 1000000;
    private static final int MIN_TEST_ITERATIONS = 10;

    private static final int NOT_STARTED = 0;  // The benchmark has not started yet.
    private static final int WARMUP = 1;  // The benchmark is warming up.
    private static final int RUNNING = 2;  // The benchmark is running.
    private static final int RUNNING_CUSTOMIZED = 3;  // Running for customized measurement.
    private static final int FINISHED = 4;  // The benchmark has stopped.

    private int mState = NOT_STARTED;  // Current benchmark state.

    private long mWarmupDurationNs = WARMUP_DURATION_NS;
    private long mTargetTestDurationNs = TARGET_TEST_DURATION_NS;
    private long mWarmupStartTime = 0;
    private int mWarmupIterations = 0;

    private int mMaxIterations = 0;

    /**
     * Additional iterations used to apply customized measurement. The results during these
     * iterations won't be counted in {@link #mStats}.
     */
    private int mMaxCustomizedIterations;
    private int mCustomizedIterations;
    private CustomizedIterationListener mCustomizedIterationListener;

    // Individual durations in nanoseconds.
    private ArrayList<Long> mResults = new ArrayList<>();

    /** @see #addExtraResult(String, long) */
    private ArrayMap<String, ArrayList<Long>> mExtraResults;

    // Single-element scratch list so keepRunning(long) can reuse keepRunning(List).
    private final List<Long> mTmpDurations = Arrays.asList(0L);

    // Statistics. These values will be filled when the benchmark has finished.
    // The computation needs double precision, but long is fine for final reporting.
    private Stats mStats;

    private int mStatsReportFlags =
            StatsReport.FLAG_MEDIAN | StatsReport.FLAG_MEAN | StatsReport.FLAG_STDDEV;
    private int[] mStatsReportPercentiles = {90, 95};

    private boolean shouldReport(int statsReportFlag) {
        return (mStatsReportFlags & statsReportFlag) != 0;
    }

    void configure(ManualBenchmarkTest testAnnotation) {
        if (testAnnotation == null) {
            return;
        }

        final long warmupDurationNs = testAnnotation.warmupDurationNs();
        if (warmupDurationNs >= 0) {
            mWarmupDurationNs = warmupDurationNs;
        }
        final long targetTestDurationNs = testAnnotation.targetTestDurationNs();
        if (targetTestDurationNs >= 0) {
            mTargetTestDurationNs = targetTestDurationNs;
        }
        final StatsReport statsReport = testAnnotation.statsReport();
        if (statsReport != null && statsReport.flags() != DEFAULT_STATS_REPORT) {
            mStatsReportFlags = statsReport.flags();
            mStatsReportPercentiles = statsReport.percentiles();
        }
    }

    private void beginBenchmark(long warmupDuration, int iterations) {
        // Estimate how many iterations fit in the target duration, based on the average
        // iteration time observed during warm-up, then clamp to the allowed range.
        mMaxIterations = (int) (mTargetTestDurationNs / (warmupDuration / iterations));
        mMaxIterations = Math.min(MAX_TEST_ITERATIONS,
                Math.max(mMaxIterations, MIN_TEST_ITERATIONS));
        mState = RUNNING;
    }
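    // Worked example for the computation in beginBenchmark() above (illustrative numbers,
    // not from the original source): a warm-up that took 5 seconds over 50 iterations averages
    // ~100 ms per iteration, so the default 16-second target yields 16s / 100ms = 160 measured
    // iterations, clamped to [MIN_TEST_ITERATIONS, MAX_TEST_ITERATIONS].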
    /**
     * Decides whether the benchmark needs more samples.
     *
     * For usage, see the class comment.
     */
    public boolean keepRunning(long duration) {
        if (duration < 0) {
            throw new RuntimeException("duration is negative: " + duration);
        }
        mTmpDurations.set(0, duration);
        return keepRunning(mTmpDurations);
    }

    /**
     * Similar to {@link #keepRunning(long)} but accepts a list of durations.
     */
    public boolean keepRunning(List<Long> durations) {
        switch (mState) {
            case NOT_STARTED:
                mState = WARMUP;
                mWarmupStartTime = System.nanoTime();
                return true;
            case WARMUP: {
                if (ArrayUtils.isEmpty(durations)) {
                    return true;
                }
                final long timeSinceStartingWarmup = System.nanoTime() - mWarmupStartTime;
                mWarmupIterations += durations.size();
                if (mWarmupIterations >= WARMUP_MIN_ITERATIONS
                        && timeSinceStartingWarmup >= mWarmupDurationNs) {
                    beginBenchmark(timeSinceStartingWarmup, mWarmupIterations);
                }
                return true;
            }
            case RUNNING: {
                if (ArrayUtils.isEmpty(durations)) {
                    return true;
                }
                mResults.addAll(durations);
                final boolean keepRunning = mResults.size() < mMaxIterations;
                if (!keepRunning) {
                    mStats = new Stats(mResults);
                    if (mMaxCustomizedIterations > 0 && mCustomizedIterationListener != null) {
                        mState = RUNNING_CUSTOMIZED;
                        mCustomizedIterationListener.onStart(mCustomizedIterations);
                        return true;
                    }
                    mState = FINISHED;
                }
                return keepRunning;
            }
            case RUNNING_CUSTOMIZED: {
                mCustomizedIterationListener.onFinished(mCustomizedIterations);
                mCustomizedIterations++;
                if (mCustomizedIterations >= mMaxCustomizedIterations) {
                    mState = FINISHED;
                    return false;
                }
                mCustomizedIterationListener.onStart(mCustomizedIterations);
                return true;
            }
            case FINISHED:
                throw new IllegalStateException("The benchmark has finished.");
            default:
                throw new IllegalStateException("The benchmark is in an unknown state.");
        }
    }

    /**
     * @return {@code true} if the benchmark is in the warmup state. It can be used to skip
     *         operations or measurements that are unnecessary while the test isn't running the
     *         actual benchmark.
     */
    public boolean isWarmingUp() {
        return mState == WARMUP;
    }

    /**
     * This is used to run the benchmark with more information by enabling some debug mechanism,
     * while excluding the slower debug runs from the stats report.
     */
    public void setCustomizedIterations(int iterations, CustomizedIterationListener listener) {
        mMaxCustomizedIterations = iterations;
        mCustomizedIterationListener = listener;
    }
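    // A minimal sketch (not part of the original file) of a listener that captures a method
    // trace during the extra customized iterations; the trace name is hypothetical.
    //
    //     state.setCustomizedIterations(2, new CustomizedIterationListener() {
    //         @Override
    //         public void onStart(int iteration) {
    //             android.os.Debug.startMethodTracing("sample_trace_" + iteration);
    //         }
    //
    //         @Override
    //         public void onFinished(int iteration) {
    //             android.os.Debug.stopMethodTracing();
    //         }
    //     });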
    /**
     * Adds an additional result while this benchmark isn't warming up or running in the
     * customized state. It is used when a sequence of operations is executed consecutively,
     * so the duration of each operation can also be recorded.
     */
    public void addExtraResult(String key, long duration) {
        if (isWarmingUp() || mState == RUNNING_CUSTOMIZED) {
            return;
        }
        if (mExtraResults == null) {
            mExtraResults = new ArrayMap<>();
        }
        mExtraResults.computeIfAbsent(key, k -> new ArrayList<>()).add(duration);
    }

    private static String summaryLine(String key, Stats stats, ArrayList<Long> results) {
        final StringBuilder sb = new StringBuilder(key);
        sb.append(" Summary: ");
        sb.append("median=").append(stats.getMedian()).append("ns, ");
        sb.append("mean=").append(stats.getMean()).append("ns, ");
        sb.append("min=").append(stats.getMin()).append("ns, ");
        sb.append("max=").append(stats.getMax()).append("ns, ");
        sb.append("sigma=").append(stats.getStandardDeviation()).append(", ");
        sb.append("iteration=").append(results.size()).append(", ");
        sb.append("values=");
        if (results.size() > 100) {
            sb.append(results.subList(0, 100)).append(" ...");
        } else {
            sb.append(results);
        }
        return sb.toString();
    }

    private void fillStatus(Bundle status, String key, Stats stats) {
        if (shouldReport(StatsReport.FLAG_ITERATION)) {
            status.putLong(key + "_iteration", stats.getSize());
        }
        if (shouldReport(StatsReport.FLAG_MEDIAN)) {
            status.putLong(key + "_median", stats.getMedian());
        }
        if (shouldReport(StatsReport.FLAG_MEAN)) {
            status.putLong(key + "_mean", Math.round(stats.getMean()));
        }
        if (shouldReport(StatsReport.FLAG_MIN)) {
            status.putLong(key + "_min", stats.getMin());
        }
        if (shouldReport(StatsReport.FLAG_MAX)) {
            status.putLong(key + "_max", stats.getMax());
        }
        if (mStatsReportPercentiles != null) {
            for (int percentile : mStatsReportPercentiles) {
                status.putLong(key + "_percentile" + percentile, stats.getPercentile(percentile));
            }
        }
        if (shouldReport(StatsReport.FLAG_STDDEV)) {
            status.putLong(key + "_stddev", Math.round(stats.getStandardDeviation()));
        }
        if (shouldReport(StatsReport.FLAG_COEFFICIENT_VAR)) {
            status.putLong(key + "_cv",
                    Math.round((100 * stats.getStandardDeviation() / stats.getMean())));
        }
    }

    public void sendFullStatusReport(Instrumentation instrumentation, String key) {
        if (mState != FINISHED) {
            throw new IllegalStateException("The benchmark hasn't finished");
        }
        Log.i(TAG, summaryLine(key, mStats, mResults));
        final Bundle status = new Bundle();
        fillStatus(status, key, mStats);
        if (mExtraResults != null) {
            for (int i = 0; i < mExtraResults.size(); i++) {
                final String subKey = key + "_" + mExtraResults.keyAt(i);
                final ArrayList<Long> results = mExtraResults.valueAt(i);
                final Stats stats = new Stats(results);
                Log.i(TAG, summaryLine(subKey, stats, results));
                fillStatus(status, subKey, stats);
            }
        }
        instrumentation.sendStatus(Activity.RESULT_OK, status);
    }
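    // Illustrative sketch (not part of the original file) of recording a sub-step with
    // addExtraResult() and reporting at the end. The helper names doFirstStep/doSecondStep and
    // the keys are hypothetical; "instrumentation" is the test's Instrumentation instance.
    //
    //     ManualBenchmarkState state = new ManualBenchmarkState();
    //     long elapsed = 0;
    //     while (state.keepRunning(elapsed)) {
    //         long start = System.nanoTime();
    //         doFirstStep();
    //         state.addExtraResult("firstStep", System.nanoTime() - start);
    //         doSecondStep();
    //         elapsed = System.nanoTime() - start;
    //     }
    //     state.sendFullStatusReport(instrumentation, "sampleSteps");
    //
    // With this setup the status bundle would contain keys such as "sampleSteps_median" for the
    // whole iteration and "sampleSteps_firstStep_median" for the recorded sub-step.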
    /** The annotation to customize the test, e.g. the duration of warm-up and target test. */
    @Target(ElementType.METHOD)
    @Retention(RetentionPolicy.RUNTIME)
    public @interface ManualBenchmarkTest {
        long warmupDurationNs() default -1;
        long targetTestDurationNs() default -1;
        StatsReport statsReport() default @StatsReport(flags = DEFAULT_STATS_REPORT);
    }
}