1 /* 2 * Copyright (C) 2016 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 package com.android.tradefed.testtype.suite; 17 18 import com.android.ddmlib.Log.LogLevel; 19 import com.android.tradefed.build.BuildRetrievalError; 20 import com.android.tradefed.build.IBuildInfo; 21 import com.android.tradefed.config.Configuration; 22 import com.android.tradefed.config.ConfigurationDescriptor; 23 import com.android.tradefed.config.ConfigurationException; 24 import com.android.tradefed.config.DeviceConfigurationHolder; 25 import com.android.tradefed.config.DynamicRemoteFileResolver; 26 import com.android.tradefed.config.IConfiguration; 27 import com.android.tradefed.config.IConfigurationReceiver; 28 import com.android.tradefed.device.DeviceNotAvailableException; 29 import com.android.tradefed.device.ITestDevice; 30 import com.android.tradefed.device.ITestDevice.RecoveryMode; 31 import com.android.tradefed.device.StubDevice; 32 import com.android.tradefed.device.metric.IMetricCollector; 33 import com.android.tradefed.device.metric.LogcatOnFailureCollector; 34 import com.android.tradefed.device.metric.ScreenshotOnFailureCollector; 35 import com.android.tradefed.error.IHarnessException; 36 import com.android.tradefed.invoker.IInvocationContext; 37 import com.android.tradefed.invoker.InvocationContext; 38 import com.android.tradefed.invoker.TestInformation; 39 import com.android.tradefed.invoker.logger.CurrentInvocation; 40 
import com.android.tradefed.invoker.logger.InvocationMetricLogger; 41 import com.android.tradefed.invoker.logger.InvocationMetricLogger.InvocationMetricKey; 42 import com.android.tradefed.invoker.logger.TfObjectTracker; 43 import com.android.tradefed.invoker.shard.token.TokenProperty; 44 import com.android.tradefed.log.ILogRegistry.EventType; 45 import com.android.tradefed.log.ITestLogger; 46 import com.android.tradefed.log.LogRegistry; 47 import com.android.tradefed.log.LogUtil.CLog; 48 import com.android.tradefed.metrics.proto.MetricMeasurement.Metric; 49 import com.android.tradefed.result.FailureDescription; 50 import com.android.tradefed.result.ILogSaver; 51 import com.android.tradefed.result.ILogSaverListener; 52 import com.android.tradefed.result.ITestInvocationListener; 53 import com.android.tradefed.result.ITestLoggerReceiver; 54 import com.android.tradefed.result.LogFile; 55 import com.android.tradefed.result.MultiFailureDescription; 56 import com.android.tradefed.result.ResultForwarder; 57 import com.android.tradefed.result.TestDescription; 58 import com.android.tradefed.result.TestResult; 59 import com.android.tradefed.result.TestRunResult; 60 import com.android.tradefed.result.error.ErrorIdentifier; 61 import com.android.tradefed.result.error.InfraErrorIdentifier; 62 import com.android.tradefed.result.proto.TestRecordProto.FailureStatus; 63 import com.android.tradefed.retry.IRetryDecision; 64 import com.android.tradefed.retry.RetryStatistics; 65 import com.android.tradefed.suite.checker.ISystemStatusCheckerReceiver; 66 import com.android.tradefed.targetprep.BuildError; 67 import com.android.tradefed.targetprep.ITargetPreparer; 68 import com.android.tradefed.targetprep.TargetSetupError; 69 import com.android.tradefed.targetprep.multi.IMultiTargetPreparer; 70 import com.android.tradefed.testtype.IBuildReceiver; 71 import com.android.tradefed.testtype.IDeviceTest; 72 import com.android.tradefed.testtype.IInvocationContextReceiver; 73 import 
com.android.tradefed.testtype.IRemoteTest; 74 import com.android.tradefed.testtype.IRuntimeHintProvider; 75 import com.android.tradefed.testtype.ITestCollector; 76 import com.android.tradefed.testtype.suite.module.BaseModuleController; 77 import com.android.tradefed.testtype.suite.module.IModuleController.RunStrategy; 78 import com.android.tradefed.util.MultiMap; 79 import com.android.tradefed.util.StreamUtil; 80 import com.android.tradefed.util.proto.TfMetricProtoUtil; 81 82 import com.google.common.annotations.VisibleForTesting; 83 84 import java.util.ArrayList; 85 import java.util.Collection; 86 import java.util.Collections; 87 import java.util.HashMap; 88 import java.util.HashSet; 89 import java.util.List; 90 import java.util.ListIterator; 91 import java.util.Map; 92 import java.util.Map.Entry; 93 import java.util.Set; 94 95 /** 96 * Container for the test run configuration. This class is an helper to prepare and run the tests. 97 */ 98 public class ModuleDefinition implements Comparable<ModuleDefinition>, ITestCollector { 99 100 /** key names used for saving module info into {@link IInvocationContext} */ 101 /** 102 * Module name is the base name associated with the module, usually coming from the Xml TF 103 * config file the module was loaded from. 104 */ 105 public static final String MODULE_NAME = "module-name"; 106 public static final String MODULE_ABI = "module-abi"; 107 public static final String MODULE_PARAMETERIZATION = "module-param"; 108 /** 109 * Module ID the name that will be used to identify uniquely the module during testRunStart. It 110 * will usually be a combination of MODULE_ABI + MODULE_NAME. 
     */
    public static final String MODULE_ID = "module-id";

    public static final String MODULE_CONTROLLER = "module_controller";

    // Metric keys attached to the module run results (see the final reporting step).
    public static final String PREPARATION_TIME = "PREP_TIME";
    public static final String TEAR_DOWN_TIME = "TEARDOWN_TIME";
    public static final String TEST_TIME = "TEST_TIME";
    public static final String RETRY_TIME = "MODULE_RETRY_TIME";
    public static final String RETRY_SUCCESS_COUNT = "MODULE_RETRY_SUCCESS";
    public static final String RETRY_FAIL_COUNT = "MODULE_RETRY_FAILED";

    // Invocation context scoped to this module; carries the MODULE_* attributes above.
    private final IInvocationContext mModuleInvocationContext;
    // The module's own configuration, loaded from its XML config.
    private final IConfiguration mModuleConfiguration;
    // Throw-away configuration used to resolve dynamic files for the test under execution.
    private IConfiguration mInternalTestConfiguration;
    // Throw-away configuration used to resolve dynamic files for the target preparers.
    private IConfiguration mInternalTargetPreparerConfiguration;
    private ILogSaver mLogSaver;

    private final String mId;
    // Pool of tests. May be shared with another ModuleDefinition in case of sharding, so every
    // access is synchronized on the collection itself.
    private Collection<IRemoteTest> mTests = null;
    // Per-device-config-name preparers to run before the tests.
    private Map<String, List<ITargetPreparer>> mPreparersPerDevice = null;

    private List<IMultiTargetPreparer> mMultiPreparers = new ArrayList<>();
    private IBuildInfo mBuild;
    private ITestDevice mDevice;
    private List<IMetricCollector> mRunMetricCollectors = new ArrayList<>();
    private boolean mCollectTestsOnly = false;

    // Accumulated results when attempts are merged (mMergeAttempts == true).
    private List<TestRunResult> mTestsResults = new ArrayList<>();
    // Per-attempt listeners kept when attempts are reported individually.
    private List<ModuleListener> mRunListenersResults = new ArrayList<>();
    private int mExpectedTests = 0;
    private boolean mIsFailedModule = false;

    // Tracking of preparers performance
    private long mElapsedPreparation = 0l;
    private long mElapsedTearDown = 0l;

    private long mStartTestTime = 0l;
    private Long mStartModuleRunDate = null;

    // Tracking of retry performance
    private List<RetryStatistics> mRetryStats = new ArrayList<>();
    private boolean mDisableAutoRetryTimeReporting = false;

    // When true, all attempts of a run are merged into one reported result.
    private boolean mMergeAttempts = true;
    private IRetryDecision mRetryDecision;

    // Token during sharding
    private Set<TokenProperty> mRequiredTokens = new HashSet<>();

    private boolean mEnableDynamicDownload = false;

    /**
     * Constructor
     *
     * @param name unique name of the test configuration.
     * @param tests list of {@link IRemoteTest} that needs to run.
     * @param preparersPerDevice list of {@link ITargetPreparer} to be used to setup the device.
     * @param multiPreparers list of {@link IMultiTargetPreparer} for multi-device setup.
     * @param moduleConfig the {@link IConfiguration} of the underlying module config.
     */
    public ModuleDefinition(
            String name,
            Collection<IRemoteTest> tests,
            Map<String, List<ITargetPreparer>> preparersPerDevice,
            List<IMultiTargetPreparer> multiPreparers,
            IConfiguration moduleConfig) {
        mId = name;
        mTests = tests;
        mModuleConfiguration = moduleConfig;
        ConfigurationDescriptor configDescriptor = moduleConfig.getConfigurationDescription();
        mModuleInvocationContext = new InvocationContext();
        // Clone the descriptor so later mutations do not leak back into the module config.
        mModuleInvocationContext.setConfigurationDescriptor(configDescriptor.clone());

        // If available in the suite, add the abi name
        if (configDescriptor.getAbi() != null) {
            mModuleInvocationContext.addInvocationAttribute(
                    MODULE_ABI, configDescriptor.getAbi().getName());
        }
        if (configDescriptor.getModuleName() != null) {
            mModuleInvocationContext.addInvocationAttribute(
                    MODULE_NAME, configDescriptor.getModuleName());
        }
        String parameterization =
                configDescriptor
                        .getAllMetaData()
                        .getUniqueMap()
                        .get(ConfigurationDescriptor.ACTIVE_PARAMETER_KEY);
        if (parameterization != null) {
            mModuleInvocationContext.addInvocationAttribute(
                    MODULE_PARAMETERIZATION, parameterization);
        }
        // If there is no specific abi, module-id should be module-name
        mModuleInvocationContext.addInvocationAttribute(MODULE_ID, mId);

        mMultiPreparers.addAll(multiPreparers);
        mPreparersPerDevice = preparersPerDevice;

        // Get the tokens of the module
        List<String> tokens = configDescriptor.getMetaData(ITestSuite.TOKEN_KEY);
        if (tokens != null) {
            for (String token : tokens) {
                mRequiredTokens.add(TokenProperty.valueOf(token.toUpperCase()));
            }
        }
    }

    /**
     * Returns the next {@link IRemoteTest} from the list of tests, removing it from the pool, or
     * {@code null} when the pool is exhausted. The list of tests of a module may be shared with
     * another one in case of sharding, hence the synchronization.
     */
    IRemoteTest poll() {
        synchronized (mTests) {
            if (mTests.isEmpty()) {
                return null;
            }
            IRemoteTest test = mTests.iterator().next();
            mTests.remove(test);
            return test;
        }
    }

    /**
     * Add some {@link IRemoteTest} to be executed as part of the module. Used when merging two
     * modules.
     */
    void addTests(List<IRemoteTest> test) {
        synchronized (mTests) {
            mTests.addAll(test);
        }
    }

    /** Returns the current number of {@link IRemoteTest} waiting to be executed. */
    public int numTests() {
        synchronized (mTests) {
            return mTests.size();
        }
    }

    /**
     * Returns True when the module's pool of {@link IRemoteTest} is empty (nothing left to run),
     * False otherwise.
     *
     * <p>NOTE(review): the previous javadoc claimed the opposite polarity ("True if the Module
     * still has tests to run"), but the code returns {@code mTests.isEmpty()}. Callers likely
     * depend on the implemented behavior — confirm with call sites before changing either side.
     */
    protected boolean hasTests() {
        synchronized (mTests) {
            return mTests.isEmpty();
        }
    }

    /** Return the unique module name. */
    public String getId() {
        return mId;
    }

    /** {@inheritDoc} Modules are ordered by their id. */
    @Override
    public int compareTo(ModuleDefinition moduleDef) {
        return getId().compareTo(moduleDef.getId());
    }

    /**
     * Inject the {@link IBuildInfo} to be used during the tests.
     */
    public void setBuild(IBuildInfo build) {
        mBuild = build;
    }

    /**
     * Inject the {@link ITestDevice} to be used during the tests.
     */
    public void setDevice(ITestDevice device) {
        mDevice = device;
    }

    /** Inject the List of {@link IMetricCollector} to be used by the module. */
    public void setMetricCollectors(List<IMetricCollector> collectors) {
        if (collectors == null) {
            // Nothing to add; keep whatever collectors were set before.
            return;
        }
        mRunMetricCollectors.addAll(collectors);
    }

    /** Pass the invocation log saver to the module so it can use it if necessary. */
    public void setLogSaver(ILogSaver logSaver) {
        mLogSaver = logSaver;
    }

    /**
     * Run all the {@link IRemoteTest} contained in the module and use all the preparers before and
     * after to setup and clean the device.
     *
     * @param moduleInfo the {@link TestInformation} for the module.
     * @param listener the {@link ITestInvocationListener} where to report results.
     * @throws DeviceNotAvailableException in case of device going offline.
     */
    public final void run(TestInformation moduleInfo, ITestInvocationListener listener)
            throws DeviceNotAvailableException {
        run(moduleInfo, listener, null, null);
    }

    /**
     * Run all the {@link IRemoteTest} contained in the module and use all the preparers before and
     * after to setup and clean the device.
     *
     * @param moduleInfo the {@link TestInformation} for the module.
     * @param listener the {@link ITestInvocationListener} where to report results.
     * @param moduleLevelListeners The list of listeners at the module level.
     * @param failureListener a particular listener to collect logs on testFail. Can be null.
     * @throws DeviceNotAvailableException in case of device going offline.
     */
    public final void run(
            TestInformation moduleInfo,
            ITestInvocationListener listener,
            List<ITestInvocationListener> moduleLevelListeners,
            TestFailureListener failureListener)
            throws DeviceNotAvailableException {
        // Default to a single attempt per testcase (no retry).
        run(moduleInfo, listener, moduleLevelListeners, failureListener, 1);
    }

    /**
     * Run all the {@link IRemoteTest} contained in the module and use all the preparers before and
     * after to setup and clean the device.
     *
     * @param moduleInfo the {@link TestInformation} for the module.
     * @param listener the {@link ITestInvocationListener} where to report results.
     * @param moduleLevelListeners The list of listeners at the module level.
     * @param failureListener a particular listener to collect logs on testFail. Can be null.
     * @param maxRunLimit the max number of runs for each testcase.
     * @throws DeviceNotAvailableException in case of device going offline.
     */
    public final void run(
            TestInformation moduleInfo,
            ITestInvocationListener listener,
            List<ITestInvocationListener> moduleLevelListeners,
            TestFailureListener failureListener,
            int maxRunLimit)
            throws DeviceNotAvailableException {
        mStartModuleRunDate = System.currentTimeMillis();
        // Load extra configuration for the module from module_controller
        // TODO: make module_controller a full TF object
        boolean skipTestCases = false;
        RunStrategy rs = applyConfigurationControl(failureListener);
        if (RunStrategy.FULL_MODULE_BYPASS.equals(rs)) {
            CLog.d("module_controller applied and module %s should not run.", getId());
            return;
        } else if (RunStrategy.SKIP_MODULE_TESTCASES.equals(rs)) {
            CLog.d("All tests cases for %s will be marked skipped.", getId());
            skipTestCases = true;
        }

        CLog.logAndDisplay(LogLevel.DEBUG, "Running module %s", getId());
        // Exception generated during setUp or run of the tests
        Throwable preparationException = null;
        DeviceNotAvailableException runException = null;
        // Resolve dynamic files except for the IRemoteTest ones
        preparationException = invokeRemoteDynamic(moduleInfo.getDevice(), mModuleConfiguration);

        if (preparationException == null) {
            // Build a temporary configuration holding only the preparers so their dynamic
            // files can be resolved (and later cleaned) independently of the module config.
            mInternalTargetPreparerConfiguration =
                    new Configuration("tmp-download", "tmp-download");
            mInternalTargetPreparerConfiguration
                    .getCommandOptions()
                    .getDynamicDownloadArgs()
                    .putAll(mModuleConfiguration.getCommandOptions().getDynamicDownloadArgs());
            for (String device : mPreparersPerDevice.keySet()) {
                mInternalTargetPreparerConfiguration.setDeviceConfig(
                        new DeviceConfigurationHolder(device));
                for (ITargetPreparer preparer : mPreparersPerDevice.get(device)) {
                    try {
                        mInternalTargetPreparerConfiguration
                                .getDeviceConfigByName(device)
                                .addSpecificConfig(preparer);
                    } catch (ConfigurationException e) {
                        // Shouldn't happen;
                        throw new RuntimeException(e);
                    }
                }
            }
            mInternalTargetPreparerConfiguration.setMultiTargetPreparers(mMultiPreparers);
            preparationException =
                    invokeRemoteDynamic(
                            moduleInfo.getDevice(), mInternalTargetPreparerConfiguration);
        }
        // Setup
        long prepStartTime = getCurrentTime();
        if (preparationException == null) {
            preparationException = runTargetPreparation(moduleInfo, listener);
        }
        // Skip multi-preparation if preparation already failed.
        if (preparationException == null) {
            for (IMultiTargetPreparer multiPreparer : mMultiPreparers) {
                preparationException = runMultiPreparerSetup(multiPreparer, moduleInfo, listener);
                if (preparationException != null) {
                    mIsFailedModule = true;
                    CLog.e("Some preparation step failed. failing the module %s", getId());
                    break;
                }
            }
        }
        mElapsedPreparation = getCurrentTime() - prepStartTime;
        // Run the tests
        try {
            if (preparationException != null) {
                // Report the setup error and bail out before running any test.
                reportSetupFailure(preparationException, listener, moduleLevelListeners);
                return;
            }
            mStartTestTime = getCurrentTime();
            // Drain the (possibly shared) test pool until it is empty.
            while (true) {
                IRemoteTest test = poll();
                if (test == null) {
                    return;
                }
                TfObjectTracker.countWithParents(test.getClass());
                if (test instanceof IBuildReceiver) {
                    ((IBuildReceiver) test).setBuild(mBuild);
                }
                if (test instanceof IDeviceTest) {
                    ((IDeviceTest) test).setDevice(mDevice);
                }
                if (test instanceof IInvocationContextReceiver) {
                    ((IInvocationContextReceiver) test)
                            .setInvocationContext(mModuleInvocationContext);
                }
                mInternalTestConfiguration = new Configuration("tmp-download", "tmp-download");
                mInternalTestConfiguration
                        .getCommandOptions()
                        .getDynamicDownloadArgs()
                        .putAll(mModuleConfiguration.getCommandOptions().getDynamicDownloadArgs());
                // We do it before the official set, otherwise the IConfiguration will not be the
                // right one.
                mInternalTestConfiguration.setTest(test);
                if (test instanceof IConfigurationReceiver) {
                    ((IConfigurationReceiver) test).setConfiguration(mModuleConfiguration);
                }
                if (test instanceof ISystemStatusCheckerReceiver) {
                    // We do not pass down Status checker because they are already running at the
                    // top level suite.
                    ((ISystemStatusCheckerReceiver) test).setSystemStatusChecker(new ArrayList<>());
                }
                if (test instanceof ITestCollector) {
                    if (skipTestCases) {
                        mCollectTestsOnly = true;
                    }
                    ((ITestCollector) test).setCollectTestsOnly(mCollectTestsOnly);
                }
                GranularRetriableTestWrapper retriableTest =
                        prepareGranularRetriableWrapper(
                                test,
                                listener,
                                failureListener,
                                moduleLevelListeners,
                                skipTestCases,
                                maxRunLimit);
                retriableTest.setCollectTestsOnly(mCollectTestsOnly);
                // Resolve the dynamic options for that one test.
                preparationException =
                        invokeRemoteDynamic(moduleInfo.getDevice(), mInternalTestConfiguration);
                if (preparationException != null) {
                    reportSetupFailure(preparationException, listener, moduleLevelListeners);
                    return;
                }
                try {
                    retriableTest.run(moduleInfo, listener);
                } catch (DeviceNotAvailableException dnae) {
                    runException = dnae;
                    // We do special logging of some information in Context of the module for easier
                    // debugging.
                    CLog.e(
                            "Module %s threw a DeviceNotAvailableException on device %s during "
                                    + "test %s",
                            getId(), mDevice.getSerialNumber(), test.getClass());
                    CLog.e(dnae);
                    // log an events
                    logDeviceEvent(
                            EventType.MODULE_DEVICE_NOT_AVAILABLE,
                            mDevice.getSerialNumber(),
                            dnae,
                            getId());
                    throw dnae;
                } finally {
                    // Always clean the per-test temporary config and collect attempt results,
                    // even when the device went away.
                    mInternalTestConfiguration.cleanConfigurationData();
                    mInternalTestConfiguration = null;
                    if (mMergeAttempts) {
                        // A single module can generate several test runs
                        mTestsResults.addAll(retriableTest.getFinalTestRunResults());
                    } else {
                        // Keep track of each listener for attempts
                        mRunListenersResults.add(retriableTest.getResultListener());
                    }

                    mExpectedTests += retriableTest.getExpectedTestsCount();
                    // Get information about retry
                    if (mRetryDecision != null) {
                        RetryStatistics res = mRetryDecision.getRetryStatistics();
                        if (res != null) {
                            addRetryTime(res.mRetryTime);
                            mRetryStats.add(res);
                        }
                    }
                }
                // After the run, if the test failed (even after retry the final result passed) has
                // failed, capture a bugreport.
                if (retriableTest.getResultListener().hasLastAttemptFailed()) {
                    captureBugreport(listener, getId());
                }
            }
        } finally {
            // Clean target preparers dynamic files.
            if (mInternalTargetPreparerConfiguration != null) {
                mInternalTargetPreparerConfiguration.cleanConfigurationData();
                mInternalTargetPreparerConfiguration = null;
            }
            long cleanStartTime = getCurrentTime();
            RuntimeException tearDownException = null;
            try {
                // Prefer the run-time DNAE as the teardown context, else the setup error.
                Throwable exception = (runException != null) ? runException : preparationException;
                // Tear down
                runTearDown(moduleInfo, exception);
            } catch (DeviceNotAvailableException dnae) {
                CLog.e(
                        "Module %s failed during tearDown with: %s",
                        getId(), StreamUtil.getStackTrace(dnae));
                throw dnae;
            } catch (RuntimeException e) {
                CLog.e("Exception while running tearDown:");
                CLog.e(e);
                tearDownException = e;
            } finally {
                if (failureListener != null) {
                    failureListener.join();
                }
                mElapsedTearDown = getCurrentTime() - cleanStartTime;
                // finalize results
                if (preparationException == null) {
                    mModuleConfiguration.cleanConfigurationData();
                    if (mMergeAttempts) {
                        reportFinalResults(
                                listener, mExpectedTests, mTestsResults, null, tearDownException);
                    } else {
                        // Push the attempts one by one
                        for (int i = 0; i < maxRunLimit; i++) {
                            // Get all the results for the attempt
                            List<TestRunResult> runResultList = new ArrayList<TestRunResult>();
                            int expectedCount = 0;
                            for (ModuleListener attemptListener : mRunListenersResults) {
                                for (String runName : attemptListener.getTestRunNames()) {
                                    TestRunResult run =
                                            attemptListener.getTestRunAtAttempt(runName, i);
                                    if (run != null) {
                                        runResultList.add(run);
                                        expectedCount += run.getExpectedTestCount();
                                    }
                                }
                            }

                            if (!runResultList.isEmpty()) {
                                reportFinalResults(
                                        listener,
                                        expectedCount,
                                        runResultList,
                                        i,
                                        tearDownException);
                            } else {
                                CLog.d("No results to be forwarded for attempt %s.", i);
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Create a wrapper class for the {@link IRemoteTest} which has built-in logic to schedule
     * multiple test runs for the same module, and have the ability to run testcases at a more
     * granular level (a subset of testcases in the module).
     *
     * @param test the {@link IRemoteTest} that is being wrapped.
     * @param failureListener a particular listener to collect logs on testFail. Can be null.
     * @param skipTestCases A run strategy when SKIP_MODULE_TESTCASES is defined.
     * @param maxRunLimit a rate-limiter on testcases retrying times.
     */
    @VisibleForTesting
    GranularRetriableTestWrapper prepareGranularRetriableWrapper(
            IRemoteTest test,
            ITestInvocationListener listener,
            TestFailureListener failureListener,
            List<ITestInvocationListener> moduleLevelListeners,
            boolean skipTestCases,
            int maxRunLimit) {
        GranularRetriableTestWrapper retriableTest =
                new GranularRetriableTestWrapper(
                        test, listener, failureListener, moduleLevelListeners, maxRunLimit);
        // Propagate the module-level state the wrapper needs to run and report.
        retriableTest.setModuleId(getId());
        retriableTest.setMarkTestsSkipped(skipTestCases);
        retriableTest.setMetricCollectors(mRunMetricCollectors);
        retriableTest.setModuleConfig(mModuleConfiguration);
        retriableTest.setInvocationContext(mModuleInvocationContext);
        retriableTest.setLogSaver(mLogSaver);
        retriableTest.setRetryDecision(mRetryDecision);
        return retriableTest;
    }

    /**
     * Capture a bugreport on every real device of the module's context (stub devices are
     * skipped) and log it under a module/serial-specific name.
     */
    private void captureBugreport(ITestLogger listener, String moduleId) {
        for (ITestDevice device : mModuleInvocationContext.getDevices()) {
            if (device.getIDevice() instanceof StubDevice) {
                // No bugreport possible on a stub (placeholder) device.
                continue;
            }
            device.logBugreport(
                    String.format(
                            "module-%s-failure-%s-bugreport", moduleId, device.getSerialNumber()),
                    listener);
        }
    }

    /** Helper to log the device events.
     */
    private void logDeviceEvent(EventType event, String serial, Throwable t, String moduleId) {
        Map<String, String> args = new HashMap<>();
        args.put("serial", serial);
        args.put("trace", StreamUtil.getStackTrace(t));
        args.put("module-id", moduleId);
        LogRegistry.getLogRegistry().logEvent(LogLevel.DEBUG, event, args);
    }

    /**
     * Finalize results to report them all and count if there are missing tests.
     *
     * <p>Forwards every test result to the listener inside a single testRunStarted/testRunEnded
     * pair, attaches preparation/teardown/test-time (and retry) metrics, and reports run
     * failures (including an expected-vs-actual test count mismatch) as a single failure.
     */
    private void reportFinalResults(
            ITestInvocationListener listener,
            int totalExpectedTests,
            List<TestRunResult> listResults,
            Integer attempt,
            RuntimeException tearDownException) {
        long elapsedTime = 0l;
        HashMap<String, Metric> metricsProto = new HashMap<>();
        if (attempt != null) {
            // Per-attempt reporting: use the attempt's own start time when available.
            long startTime =
                    listResults.isEmpty() ? mStartTestTime : listResults.get(0).getStartTime();
            listener.testRunStarted(getId(), totalExpectedTests, attempt, startTime);
        } else {
            listener.testRunStarted(getId(), totalExpectedTests, 0, mStartTestTime);
        }
        int numResults = 0;
        MultiMap<String, LogFile> aggLogFiles = new MultiMap<>();
        List<FailureDescription> runFailureMessages = new ArrayList<>();
        for (TestRunResult runResult : listResults) {
            numResults += runResult.getTestResults().size();
            forwardTestResults(runResult.getTestResults(), listener);
            if (runResult.isRunFailure()) {
                runFailureMessages.add(runResult.getRunFailureDescription());
            }
            elapsedTime += runResult.getElapsedTime();
            // put metrics from the tests
            metricsProto.putAll(runResult.getRunProtoMetrics());
            aggLogFiles.putAll(runResult.getRunLoggedFiles());
        }
        // put metrics from the preparation
        metricsProto.put(
                PREPARATION_TIME,
                TfMetricProtoUtil.createSingleValue(mElapsedPreparation, "milliseconds"));
        metricsProto.put(
                TEAR_DOWN_TIME,
                TfMetricProtoUtil.createSingleValue(mElapsedTearDown, "milliseconds"));
        metricsProto.put(
                TEST_TIME, TfMetricProtoUtil.createSingleValue(elapsedTime, "milliseconds"));
        // Report all the retry informations
        if (!mRetryStats.isEmpty()) {
            RetryStatistics agg = RetryStatistics.aggregateStatistics(mRetryStats);
            metricsProto.put(
                    RETRY_TIME,
                    TfMetricProtoUtil.createSingleValue(agg.mRetryTime, "milliseconds"));
            metricsProto.put(
                    RETRY_SUCCESS_COUNT,
                    TfMetricProtoUtil.createSingleValue(agg.mRetrySuccess, ""));
            metricsProto.put(
                    RETRY_FAIL_COUNT, TfMetricProtoUtil.createSingleValue(agg.mRetryFailure, ""));
        }

        // Only report the mismatch if there were no error during the run.
        if (runFailureMessages.isEmpty() && totalExpectedTests != numResults) {
            String error =
                    String.format(
                            "Module %s only ran %d out of %d expected tests.",
                            getId(), numResults, totalExpectedTests);
            FailureDescription mismatch =
                    FailureDescription.create(error)
                            .setFailureStatus(FailureStatus.TEST_FAILURE)
                            .setErrorIdentifier(InfraErrorIdentifier.EXPECTED_TESTS_MISMATCH);
            runFailureMessages.add(mismatch);
            CLog.e(error);
        }

        if (tearDownException != null) {
            FailureDescription failure =
                    CurrentInvocation.createFailure(
                                    StreamUtil.getStackTrace(tearDownException), null)
                            .setCause(tearDownException);
            runFailureMessages.add(failure);
        }
        // If there is any errors report them all at once
        if (!runFailureMessages.isEmpty()) {
            if (runFailureMessages.size() == 1) {
                listener.testRunFailed(runFailureMessages.get(0));
            } else {
                listener.testRunFailed(new MultiFailureDescription(runFailureMessages));
            }
            mIsFailedModule = true;
        }

        // Provide a strong association of the run to its logs.
        for (String key : aggLogFiles.keySet()) {
            for (LogFile logFile : aggLogFiles.get(key)) {
                if (listener instanceof ILogSaverListener) {
                    ((ILogSaverListener) listener).logAssociation(key, logFile);
                }
            }
        }
        // Allow each attempt to have its own start/end time
        if (attempt != null) {
            listener.testRunEnded(elapsedTime, metricsProto);
        } else {
            listener.testRunEnded(getCurrentTime() - mStartTestTime, metricsProto);
        }
    }

    /**
     * Replay already-collected test results (start, status, logs, end) into the given listener.
     */
    private void forwardTestResults(
            Map<TestDescription, TestResult> testResults, ITestInvocationListener listener) {
        for (Map.Entry<TestDescription, TestResult> testEntry : testResults.entrySet()) {
            listener.testStarted(testEntry.getKey(), testEntry.getValue().getStartTime());
            switch (testEntry.getValue().getStatus()) {
                case FAILURE:
                    listener.testFailed(testEntry.getKey(), testEntry.getValue().getStackTrace());
                    break;
                case ASSUMPTION_FAILURE:
                    listener.testAssumptionFailure(
                            testEntry.getKey(), testEntry.getValue().getStackTrace());
                    break;
                case IGNORED:
                    listener.testIgnored(testEntry.getKey());
                    break;
                case INCOMPLETE:
                    listener.testFailed(
                            testEntry.getKey(), "Test did not complete due to exception.");
                    break;
                default:
                    // PASSED and any other status: nothing extra to report between start and end.
                    break;
            }
            // Provide a strong association of the test to its logs.
            for (Entry<String, LogFile> logFile :
                    testEntry.getValue().getLoggedFiles().entrySet()) {
                if (listener instanceof ILogSaverListener) {
                    ((ILogSaverListener) listener)
                            .logAssociation(logFile.getKey(), logFile.getValue());
                }
            }
            listener.testEnded(
                    testEntry.getKey(),
                    testEntry.getValue().getEndTime(),
                    testEntry.getValue().getProtoMetrics());
        }
    }

    /** Run all the prepare steps.
     * Runs one preparer's setUp against the given device index, returning the failure as a
     * {@link Throwable} instead of propagating it ({@code null} on success or when disabled).
     */
    private Throwable runPreparerSetup(
            TestInformation moduleInfo,
            ITargetPreparer preparer,
            ITestLogger logger,
            int deviceIndex) {
        if (preparer.isDisabled()) {
            // If disabled skip completely.
            return null;
        }
        TfObjectTracker.countWithParents(preparer.getClass());
        CLog.d("Running setup preparer: %s", preparer.getClass().getSimpleName());
        try {
            // set the logger in case they need it.
            if (preparer instanceof ITestLoggerReceiver) {
                ((ITestLoggerReceiver) preparer).setTestLogger(logger);
            }
            if (preparer instanceof IInvocationContextReceiver) {
                ((IInvocationContextReceiver) preparer)
                        .setInvocationContext(mModuleInvocationContext);
            }
            moduleInfo.setActiveDeviceIndex(deviceIndex);
            preparer.setUp(moduleInfo);
            return null;
        } catch (BuildError
                | TargetSetupError
                | DeviceNotAvailableException
                | RuntimeException
                | AssertionError e) {
            // We catch all the TargetPreparer possible exception + RuntimeException to avoid
            // specific issues + AssertionError since it's widely used in tests and doesn't notify
            // something very wrong with the harness.
            CLog.e("Unexpected Exception from preparer: %s", preparer.getClass().getName());
            CLog.e(e);
            return e;
        } finally {
            // Always restore the default active device for subsequent steps.
            moduleInfo.setActiveDeviceIndex(0);
        }
    }

    /** Run all multi target preparer step. */
    private Throwable runMultiPreparerSetup(
            IMultiTargetPreparer preparer, TestInformation moduleInfo, ITestLogger logger) {
        if (preparer.isDisabled()) {
            // If disabled skip completely.
            return null;
        }
        TfObjectTracker.countWithParents(preparer.getClass());
        CLog.d("Running setup multi preparer: %s", preparer.getClass().getSimpleName());
        try {
            // set the logger in case they need it.
            if (preparer instanceof ITestLoggerReceiver) {
                ((ITestLoggerReceiver) preparer).setTestLogger(logger);
            }
            if (preparer instanceof IInvocationContextReceiver) {
                ((IInvocationContextReceiver) preparer)
                        .setInvocationContext(mModuleInvocationContext);
            }
            preparer.setUp(moduleInfo);
            return null;
        } catch (BuildError
                | TargetSetupError
                | DeviceNotAvailableException
                | RuntimeException
                | AssertionError e) {
            // We catch all the MultiTargetPreparer possible exception + RuntimeException to avoid
            // specific issues + AssertionError since it's widely used in tests and doesn't notify
            // something very wrong with the harness.
            CLog.e("Unexpected Exception from preparer: %s", preparer.getClass().getName());
            CLog.e(e);
            return e;
        }
    }

    /** Run all the tear down steps from preparers. */
    private void runTearDown(TestInformation moduleInfo, Throwable exception)
            throws DeviceNotAvailableException {
        // Tear down
        // Multi-target cleaners run first, in reverse setup order.
        List<IMultiTargetPreparer> cleanerList = new ArrayList<>(mMultiPreparers);
        Collections.reverse(cleanerList);
        for (IMultiTargetPreparer multiCleaner : cleanerList) {
            if (multiCleaner.isDisabled() || multiCleaner.isTearDownDisabled()) {
                // If disabled skip completely.
                continue;
            }
            CLog.d("Running teardown multi cleaner: %s", multiCleaner.getClass().getSimpleName());
            multiCleaner.tearDown(moduleInfo, exception);
        }

        // Then per-device preparers, also in reverse setup order for each device.
        for (int i = 0; i < mModuleInvocationContext.getDeviceConfigNames().size(); i++) {
            String deviceName = mModuleInvocationContext.getDeviceConfigNames().get(i);
            ITestDevice device = mModuleInvocationContext.getDevice(deviceName);
            if (i >= mPreparersPerDevice.size()) {
                CLog.d(
                        "Main configuration has more devices than the module configuration. '%s' "
                                + "will not run any tear down.",
                        deviceName);
                continue;
            }
            List<ITargetPreparer> preparers = mPreparersPerDevice.get(deviceName);
            if (preparers == null) {
                CLog.w(
                        "Module configuration devices mismatch the main configuration "
                                + "(Missing device '%s'), resolving preparers by index.",
                        deviceName);
                String key = new ArrayList<>(mPreparersPerDevice.keySet()).get(i);
                preparers = mPreparersPerDevice.get(key);
            }
            ListIterator<ITargetPreparer> itr = preparers.listIterator(preparers.size());
            while (itr.hasPrevious()) {
                ITargetPreparer preparer = itr.previous();
                // do not call the cleaner if it was disabled
                if (preparer.isDisabled() || preparer.isTearDownDisabled()) {
                    CLog.d("%s has been disabled. skipping.", preparer);
                    continue;
                }

                RecoveryMode origMode = null;
                try {
                    // If an exception was generated in setup with a DNAE do not attempt any
                    // recovery again in case we hit the device not available again.
890 if (exception != null && exception instanceof DeviceNotAvailableException) { 891 origMode = device.getRecoveryMode(); 892 device.setRecoveryMode(RecoveryMode.NONE); 893 } 894 moduleInfo.setActiveDeviceIndex(i); 895 preparer.tearDown(moduleInfo, exception); 896 } finally { 897 moduleInfo.setActiveDeviceIndex(0); 898 if (origMode != null) { 899 device.setRecoveryMode(origMode); 900 } 901 } 902 } 903 } 904 } 905 906 /** Returns the current time. */ getCurrentTime()907 private long getCurrentTime() { 908 return System.currentTimeMillis(); 909 } 910 911 @Override setCollectTestsOnly(boolean collectTestsOnly)912 public void setCollectTestsOnly(boolean collectTestsOnly) { 913 mCollectTestsOnly = collectTestsOnly; 914 } 915 916 /** Sets whether or not we should merge results. */ setMergeAttemps(boolean mergeAttempts)917 public final void setMergeAttemps(boolean mergeAttempts) { 918 mMergeAttempts = mergeAttempts; 919 } 920 921 /** Sets the {@link IRetryDecision} to be used for intra-module retry. */ setRetryDecision(IRetryDecision decision)922 public final void setRetryDecision(IRetryDecision decision) { 923 mRetryDecision = decision; 924 // Carry the retry decision to the module configuration 925 mModuleConfiguration.setRetryDecision(decision); 926 } 927 928 /** Returns a list of tests that ran in this module. 
*/ getTestsResults()929 List<TestRunResult> getTestsResults() { 930 return mTestsResults; 931 } 932 933 /** Returns the number of tests that was expected to be run */ getNumExpectedTests()934 int getNumExpectedTests() { 935 return mExpectedTests; 936 } 937 938 /** Returns True if a testRunFailure has been called on the module * */ hasModuleFailed()939 public boolean hasModuleFailed() { 940 return mIsFailedModule; 941 } 942 getRequiredTokens()943 public Set<TokenProperty> getRequiredTokens() { 944 return mRequiredTokens; 945 } 946 947 /** {@inheritDoc} */ 948 @Override toString()949 public String toString() { 950 return getId(); 951 } 952 953 /** Returns the approximate time to run all the tests in the module. */ getRuntimeHint()954 public long getRuntimeHint() { 955 long hint = 0l; 956 for (IRemoteTest test : mTests) { 957 if (test instanceof IRuntimeHintProvider) { 958 hint += ((IRuntimeHintProvider) test).getRuntimeHint(); 959 } else { 960 hint += 60000; 961 } 962 } 963 return hint; 964 } 965 966 /** Returns the list of {@link IRemoteTest} defined for this module. */ 967 @VisibleForTesting getTests()968 List<IRemoteTest> getTests() { 969 return new ArrayList<>(mTests); 970 } 971 972 /** Returns the list of {@link ITargetPreparer} associated with the given device name */ 973 @VisibleForTesting getTargetPreparerForDevice(String deviceName)974 List<ITargetPreparer> getTargetPreparerForDevice(String deviceName) { 975 return mPreparersPerDevice.get(deviceName); 976 } 977 978 /** 979 * When running unit tests for ModuleDefinition we don't want to unnecessarily report some auto 980 * retry times. 981 */ 982 @VisibleForTesting disableAutoRetryReportingTime()983 void disableAutoRetryReportingTime() { 984 mDisableAutoRetryTimeReporting = true; 985 } 986 987 /** Returns the {@link IInvocationContext} associated with the module. 
*/ getModuleInvocationContext()988 public IInvocationContext getModuleInvocationContext() { 989 return mModuleInvocationContext; 990 } 991 992 /** Report completely not executed modules. */ reportNotExecuted(ITestInvocationListener listener, String message)993 public final void reportNotExecuted(ITestInvocationListener listener, String message) { 994 if (mStartModuleRunDate == null) { 995 listener.testModuleStarted(getModuleInvocationContext()); 996 } 997 listener.testRunStarted(getId(), 0, 0, System.currentTimeMillis()); 998 FailureDescription description = 999 FailureDescription.create(message).setFailureStatus(FailureStatus.NOT_EXECUTED); 1000 listener.testRunFailed(description); 1001 listener.testRunEnded(0, new HashMap<String, Metric>()); 1002 listener.testModuleEnded(); 1003 } 1004 1005 /** Whether or not to enable dynamic download at module level. */ setEnableDynamicDownload(boolean enableDynamicDownload)1006 public void setEnableDynamicDownload(boolean enableDynamicDownload) { 1007 mEnableDynamicDownload = enableDynamicDownload; 1008 } 1009 addDynamicDownloadArgs(Map<String, String> extraArgs)1010 public void addDynamicDownloadArgs(Map<String, String> extraArgs) { 1011 mModuleConfiguration.getCommandOptions().getDynamicDownloadArgs().putAll(extraArgs); 1012 } 1013 1014 /** 1015 * Allow to load a module_controller object to tune how should a particular module run. 1016 * 1017 * @param failureListener The {@link TestFailureListener} taking actions on tests failures. 1018 * @return The strategy to use to run the tests. 
1019 */ applyConfigurationControl(TestFailureListener failureListener)1020 private RunStrategy applyConfigurationControl(TestFailureListener failureListener) { 1021 Object ctrlObject = mModuleConfiguration.getConfigurationObject(MODULE_CONTROLLER); 1022 if (ctrlObject != null && ctrlObject instanceof BaseModuleController) { 1023 BaseModuleController controller = (BaseModuleController) ctrlObject; 1024 // module_controller can also control the log collection for the one module 1025 if (failureListener != null) { 1026 failureListener.applyModuleConfiguration(controller.shouldCaptureBugreport()); 1027 } 1028 if (!controller.shouldCaptureLogcat()) { 1029 mRunMetricCollectors.removeIf(c -> (c instanceof LogcatOnFailureCollector)); 1030 } 1031 if (!controller.shouldCaptureScreenshot()) { 1032 mRunMetricCollectors.removeIf(c -> (c instanceof ScreenshotOnFailureCollector)); 1033 } 1034 return controller.shouldRunModule(mModuleInvocationContext); 1035 } 1036 return RunStrategy.RUN; 1037 } 1038 addRetryTime(long retryTimeMs)1039 private void addRetryTime(long retryTimeMs) { 1040 if (retryTimeMs <= 0 || mDisableAutoRetryTimeReporting) { 1041 return; 1042 } 1043 InvocationMetricLogger.addInvocationMetrics( 1044 InvocationMetricKey.AUTO_RETRY_TIME, retryTimeMs); 1045 } 1046 runTargetPreparation(TestInformation moduleInfo, ITestLogger logger)1047 private Throwable runTargetPreparation(TestInformation moduleInfo, ITestLogger logger) { 1048 Throwable preparationException = null; 1049 for (int i = 0; i < mModuleInvocationContext.getDeviceConfigNames().size(); i++) { 1050 String deviceName = mModuleInvocationContext.getDeviceConfigNames().get(i); 1051 if (i >= mPreparersPerDevice.size()) { 1052 CLog.d( 1053 "Main configuration has more devices than the module configuration. 
'%s' " 1054 + "will not run any preparation.", 1055 deviceName); 1056 continue; 1057 } 1058 List<ITargetPreparer> preparers = mPreparersPerDevice.get(deviceName); 1059 if (preparers == null) { 1060 CLog.w( 1061 "Module configuration devices mismatch the main configuration " 1062 + "(Missing device '%s'), resolving preparers by index.", 1063 deviceName); 1064 String key = new ArrayList<>(mPreparersPerDevice.keySet()).get(i); 1065 preparers = mPreparersPerDevice.get(key); 1066 } 1067 for (ITargetPreparer preparer : preparers) { 1068 preparationException = runPreparerSetup(moduleInfo, preparer, logger, i); 1069 if (preparationException != null) { 1070 mIsFailedModule = true; 1071 CLog.e("Some preparation step failed. failing the module %s", getId()); 1072 // If one device errored out, we skip the remaining devices. 1073 return preparationException; 1074 } 1075 } 1076 } 1077 return null; 1078 } 1079 1080 /** 1081 * Handle calling the {@link IConfiguration#resolveDynamicOptions(DynamicRemoteFileResolver)}. 1082 */ invokeRemoteDynamic(ITestDevice device, IConfiguration moduleConfiguration)1083 private Exception invokeRemoteDynamic(ITestDevice device, IConfiguration moduleConfiguration) { 1084 if (!mEnableDynamicDownload) { 1085 return null; 1086 } 1087 // TODO: Add elapsed time tracking 1088 try { 1089 CLog.d("Attempting to resolve dynamic files from %s", getId()); 1090 DynamicRemoteFileResolver resolver = new DynamicRemoteFileResolver(); 1091 resolver.setDevice(device); 1092 resolver.addExtraArgs(moduleConfiguration.getCommandOptions().getDynamicDownloadArgs()); 1093 moduleConfiguration.resolveDynamicOptions(resolver); 1094 return null; 1095 } catch (RuntimeException | ConfigurationException | BuildRetrievalError e) { 1096 mIsFailedModule = true; 1097 return e; 1098 } 1099 } 1100 1101 /** Report a setup exception as a run failure and notify all the listeners. 
*/ reportSetupFailure( Throwable setupException, ITestInvocationListener invocListener, List<ITestInvocationListener> moduleListeners)1102 private void reportSetupFailure( 1103 Throwable setupException, 1104 ITestInvocationListener invocListener, 1105 List<ITestInvocationListener> moduleListeners) 1106 throws DeviceNotAvailableException { 1107 List<ITestInvocationListener> allListeners = new ArrayList<>(); 1108 allListeners.add(invocListener); 1109 if (moduleListeners != null) { 1110 allListeners.addAll(moduleListeners); 1111 } 1112 // Report the early module failures to the moduleListeners too in order for them 1113 // to know about it. 1114 ITestInvocationListener forwarder = new ResultForwarder(allListeners); 1115 // For reporting purpose we create a failure placeholder with the error stack 1116 // similar to InitializationError of JUnit. 1117 forwarder.testRunStarted(getId(), 1, 0, System.currentTimeMillis()); 1118 FailureDescription failureDescription = 1119 CurrentInvocation.createFailure(StreamUtil.getStackTrace(setupException), null); 1120 if (setupException instanceof IHarnessException 1121 && ((IHarnessException) setupException).getErrorId() != null) { 1122 ErrorIdentifier id = ((IHarnessException) setupException).getErrorId(); 1123 failureDescription.setErrorIdentifier(id); 1124 failureDescription.setFailureStatus(id.status()); 1125 failureDescription.setOrigin(((IHarnessException) setupException).getOrigin()); 1126 } else { 1127 failureDescription.setFailureStatus(FailureStatus.UNSET); 1128 } 1129 failureDescription.setCause(setupException); 1130 forwarder.testRunFailed(failureDescription); 1131 HashMap<String, Metric> metricsProto = new HashMap<>(); 1132 metricsProto.put(TEST_TIME, TfMetricProtoUtil.createSingleValue(0L, "milliseconds")); 1133 forwarder.testRunEnded(0, metricsProto); 1134 // If it was a not available exception rethrow it to signal the new device state. 
1135 if (setupException instanceof DeviceNotAvailableException) { 1136 throw (DeviceNotAvailableException) setupException; 1137 } 1138 } 1139 } 1140