// InstrumentationTestRunner.java revision f48e94005b6f7d648ebe659960bed32998779439
1/* 2 * Copyright (C) 2007 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17package android.test; 18 19import com.android.internal.util.Predicate; 20import com.android.internal.util.Predicates; 21 22import android.app.Activity; 23import android.app.Instrumentation; 24import android.os.Bundle; 25import android.os.Debug; 26import android.os.Looper; 27import android.os.Parcelable; 28import android.os.PerformanceCollector; 29import android.os.PerformanceCollector.PerformanceResultsWriter; 30import android.test.suitebuilder.TestMethod; 31import android.test.suitebuilder.TestPredicates; 32import android.test.suitebuilder.TestSuiteBuilder; 33import android.test.suitebuilder.annotation.HasAnnotation; 34import android.test.suitebuilder.annotation.LargeTest; 35import android.util.Log; 36 37import java.io.ByteArrayOutputStream; 38import java.io.File; 39import java.io.PrintStream; 40import java.lang.annotation.Annotation; 41import java.lang.reflect.InvocationTargetException; 42import java.lang.reflect.Method; 43import java.util.ArrayList; 44import java.util.List; 45 46import junit.framework.AssertionFailedError; 47import junit.framework.Test; 48import junit.framework.TestCase; 49import junit.framework.TestListener; 50import junit.framework.TestResult; 51import junit.framework.TestSuite; 52import junit.runner.BaseTestRunner; 53import junit.textui.ResultPrinter; 54 55/** 56 * An {@link Instrumentation} that runs various 
types of {@link junit.framework.TestCase}s against 57 * an Android package (application). Typical usage: 58 * <ol> 59 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests 60 * against the classes in your package. Typically these are subclassed from: 61 * <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li> 62 * <li>{@link android.test.ActivityUnitTestCase}</li> 63 * <li>{@link android.test.AndroidTestCase}</li> 64 * <li>{@link android.test.ApplicationTestCase}</li> 65 * <li>{@link android.test.InstrumentationTestCase}</li> 66 * <li>{@link android.test.ProviderTestCase}</li> 67 * <li>{@link android.test.ServiceTestCase}</li> 68 * <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul> 69 * <li>In an appropriate AndroidManifest.xml, define this instrumentation with 70 * the appropriate android:targetPackage set. 71 * <li>Run the instrumentation using "adb shell am instrument -w", 72 * with no optional arguments, to run all tests (except performance tests). 73 * <li>Run the instrumentation using "adb shell am instrument -w", 74 * with the argument '-e func true' to run all functional tests. These are tests that derive from 75 * {@link android.test.InstrumentationTestCase}. 76 * <li>Run the instrumentation using "adb shell am instrument -w", 77 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive 78 * from {@link android.test.InstrumentationTestCase} (and are not performance tests). 79 * <li>Run the instrumentation using "adb shell am instrument -w", 80 * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}. 
81 * </ol> 82 * <p/> 83 * <b>Running all tests:</b> adb shell am instrument -w 84 * com.android.foo/android.test.InstrumentationTestRunner 85 * <p/> 86 * <b>Running all small tests:</b> adb shell am instrument -w 87 * -e size small 88 * com.android.foo/android.test.InstrumentationTestRunner 89 * <p/> 90 * <b>Running all medium tests:</b> adb shell am instrument -w 91 * -e size medium 92 * com.android.foo/android.test.InstrumentationTestRunner 93 * <p/> 94 * <b>Running all large tests:</b> adb shell am instrument -w 95 * -e size large 96 * com.android.foo/android.test.InstrumentationTestRunner 97 * <p/> 98 * <b>Filter test run to tests with given annotation:</b> adb shell am instrument -w 99 * -e annotation com.android.foo.MyAnnotation 100 * com.android.foo/android.test.InstrumentationTestRunner 101 * <p/> 102 * If used with other options, the resulting test run will contain the union of the two options. 103 * e.g. "-e size large -e annotation com.android.foo.MyAnnotation" will run only tests with both 104 * the {@link LargeTest} and "com.android.foo.MyAnnotation" annotations. 
105 * <p/> 106 * <b>Filter test run to tests <i>without</i> given annotation:</b> adb shell am instrument -w 107 * -e notAnnotation com.android.foo.MyAnnotation 108 * com.android.foo/android.test.InstrumentationTestRunner 109 * <p/> 110 * <b>Running a single testcase:</b> adb shell am instrument -w 111 * -e class com.android.foo.FooTest 112 * com.android.foo/android.test.InstrumentationTestRunner 113 * <p/> 114 * <b>Running a single test:</b> adb shell am instrument -w 115 * -e class com.android.foo.FooTest#testFoo 116 * com.android.foo/android.test.InstrumentationTestRunner 117 * <p/> 118 * <b>Running multiple tests:</b> adb shell am instrument -w 119 * -e class com.android.foo.FooTest,com.android.foo.TooTest 120 * com.android.foo/android.test.InstrumentationTestRunner 121 * <p/> 122 * <b>Running all tests in a java package:</b> adb shell am instrument -w 123 * -e package com.android.foo.subpkg 124 * com.android.foo/android.test.InstrumentationTestRunner 125 * <p/> 126 * <b>Including performance tests:</b> adb shell am instrument -w 127 * -e perf true 128 * com.android.foo/android.test.InstrumentationTestRunner 129 * <p/> 130 * <b>To debug your tests, set a break point in your code and pass:</b> 131 * -e debug true 132 * <p/> 133 * <b>To run in 'log only' mode</b> 134 * -e log true 135 * This option will load and iterate through all test classes and methods, but will bypass actual 136 * test execution. Useful for quickly obtaining info on the tests to be executed by an 137 * instrumentation command. 138 * <p/> 139 * <b>To generate EMMA code coverage:</b> 140 * -e coverage true 141 * Note: this requires an emma instrumented build. By default, the code coverage results file 142 * will be saved in a /data/<app>/coverage.ec file, unless overridden by coverageFile flag (see 143 * below) 144 * <p/> 145 * <b> To specify EMMA code coverage results file path:</b> 146 * -e coverageFile /sdcard/myFile.ec 147 * <br/> 148 * in addition to the other arguments. 
 */

/* (not JavaDoc)
 * Although not necessary in most cases, another way to use this class is to extend it and have the
 * derived class return the desired test suite from the {@link #getTestSuite()} method. The test
 * suite returned from this method will be used if no target class is defined in the meta-data or
 * command line argument parameters. If a derived class is used it needs to be added as an
 * instrumentation to the AndroidManifest.xml and the command to run it would look like:
 * <p/>
 * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
 * <p/>
 * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class.
 *
 * This model is used by many existing app tests, but can probably be deprecated.
 */
public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider {

    // Instrumentation argument keys accepted via "-e <key> <value>" on the am command line.
    /** @hide */
    public static final String ARGUMENT_TEST_CLASS = "class";
    /** @hide */
    public static final String ARGUMENT_TEST_PACKAGE = "package";
    /** @hide */
    public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size";
    /** @hide */
    public static final String ARGUMENT_DELAY_MSEC = "delay_msec";

    // Accepted values for the ARGUMENT_TEST_SIZE_PREDICATE ("size") argument.
    private static final String SMALL_SUITE = "small";
    private static final String MEDIUM_SUITE = "medium";
    private static final String LARGE_SUITE = "large";

    private static final String ARGUMENT_LOG_ONLY = "log";
    /** @hide */
    static final String ARGUMENT_ANNOTATION = "annotation";
    /** @hide */
    static final String ARGUMENT_NOT_ANNOTATION = "notAnnotation";

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the "small"
     * suite. It is used to make an educated guess at what suite an unlabeled test belongs.
     */
    private static final float SMALL_SUITE_MAX_RUNTIME = 100;

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the
     * "medium" suite. It is used to make an educated guess at what suite an unlabeled test belongs.
     */
    private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;

    /**
     * The following keys are used in the status bundle to provide structured reports to
     * an IInstrumentationWatcher.
     */

    /**
     * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
     * identifies InstrumentationTestRunner as the source of the report. This is sent with all
     * status messages.
     */
    public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the total number of tests that are being run. This is sent with all status
     * messages.
     */
    public static final String REPORT_KEY_NUM_TOTAL = "numtests";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the sequence number of the current test. This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NUM_CURRENT = "current";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test class. This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_CLASS = "class";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test. This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_TEST = "test";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the run time in seconds of the current test.
     */
    private static final String REPORT_KEY_RUN_TIME = "runtime";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the number of total iterations of the current test.
     */
    private static final String REPORT_KEY_NUM_ITERATIONS = "numiterations";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the guessed suite assignment for the current test.
     */
    private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the path to the generated code coverage file.
     */
    private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath";

    /**
     * The test is starting.
     */
    public static final int REPORT_VALUE_RESULT_START = 1;
    /**
     * The test completed successfully.
     */
    public static final int REPORT_VALUE_RESULT_OK = 0;
    /**
     * The test completed with an error.
     */
    public static final int REPORT_VALUE_RESULT_ERROR = -1;
    /**
     * The test completed with a failure.
     */
    public static final int REPORT_VALUE_RESULT_FAILURE = -2;
    /**
     * If included in the status bundle sent to an IInstrumentationWatcher, this key
     * identifies a stack trace describing an error or failure. This is sent with any status
     * message describing a specific test being completed.
     */
    public static final String REPORT_KEY_STACK = "stack";

    // Default file name for code coverage
    private static final String DEFAULT_COVERAGE_FILE_NAME = "coverage.ec";

    private static final String LOG_TAG = "InstrumentationTestRunner";

    // Accumulated run results; reported to the watcher via finish() in onStart().
    private final Bundle mResults = new Bundle();
    private AndroidTestRunner mTestRunner;
    private boolean mDebug;                 // "-e debug true": wait for a debugger before running
    private boolean mJustCount;             // "-e count true": only report the number of tests
    private boolean mSuiteAssignmentMode;   // "-e suiteAssignment true": guess suite sizes instead of running normally
    private int mTestCount;
    private String mPackageOfTests;         // "-e package <pkg>": restrict run to a java package
    private boolean mCoverage;              // "-e coverage true": dump EMMA coverage after the run
    private String mCoverageFilePath;       // "-e coverageFile <path>": override default coverage output path
    private int mDelayMsec;                 // "-e delay_msec <n>": idle delay inserted around each test

    /**
     * Parses the instrumentation arguments, builds the test suite to run, and configures the
     * {@link AndroidTestRunner} with the appropriate listeners before starting the run.
     */
    @Override
    public void onCreate(Bundle arguments) {
        super.onCreate(arguments);

        // Apk paths used to search for test classes when using TestSuiteBuilders.
        String[] apkPaths =
                {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()};
        ClassPathPackageInfoSource.setApkPaths(apkPaths);

        Predicate<TestMethod> testSizePredicate = null;
        Predicate<TestMethod> testAnnotationPredicate = null;
        Predicate<TestMethod> testNotAnnotationPredicate = null;
        String testClassesArg = null;
        boolean logOnly = false;

        if (arguments != null) {
            // Test class name passed as an argument should override any meta-data declaration.
            testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS);
            mDebug = getBooleanArgument(arguments, "debug");
            mJustCount = getBooleanArgument(arguments, "count");
            mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment");
            mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE);
            testSizePredicate = getSizePredicateFromArg(
                    arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE));
            testAnnotationPredicate = getAnnotationPredicate(
                    arguments.getString(ARGUMENT_ANNOTATION));
            testNotAnnotationPredicate = getNotAnnotationPredicate(
                    arguments.getString(ARGUMENT_NOT_ANNOTATION));

            logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY);
            mCoverage = getBooleanArgument(arguments, "coverage");
            mCoverageFilePath = arguments.getString("coverageFile");

            try {
                Object delay = arguments.get(ARGUMENT_DELAY_MSEC);  // Accept either string or int
                if (delay != null) mDelayMsec = Integer.parseInt(delay.toString());
            } catch (NumberFormatException e) {
                // Bad delay value: log and fall through with mDelayMsec left at 0 (no delay).
                Log.e(LOG_TAG, "Invalid delay_msec parameter", e);
            }
        }

        TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(),
                getTargetContext().getClassLoader());

        // Size/annotation predicates are combined: a test must satisfy all supplied filters.
        if (testSizePredicate != null) {
            testSuiteBuilder.addRequirements(testSizePredicate);
        }
        if (testAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testAnnotationPredicate);
        }
        if (testNotAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testNotAnnotationPredicate);
        }

        // An explicit class list takes precedence over a package filter, which in turn takes
        // precedence over a subclass-provided suite.
        if (testClassesArg == null) {
            if (mPackageOfTests != null) {
                testSuiteBuilder.includePackages(mPackageOfTests);
            } else {
                TestSuite testSuite = getTestSuite();
                if (testSuite != null) {
                    testSuiteBuilder.addTestSuite(testSuite);
                } else {
                    // no package or class bundle arguments were supplied, and no test suite
                    // provided so add all tests in application
                    testSuiteBuilder.includePackages("");
                }
            }
        } else {
            parseTestClasses(testClassesArg, testSuiteBuilder);
        }

        testSuiteBuilder.addRequirements(getBuilderRequirements());

        mTestRunner = getAndroidTestRunner();
        mTestRunner.setContext(getTargetContext());
        mTestRunner.setInstrumentation(this);
        mTestRunner.setSkipExecution(logOnly);
        mTestRunner.setTest(testSuiteBuilder.build());
        mTestCount = mTestRunner.getTestCases().size();
        if (mSuiteAssignmentMode) {
            mTestRunner.addTestListener(new SuiteAssignmentPrinter());
        } else {
            // Normal run: the same printer both reports test status and collects perf results.
            WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount);
            mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
            mTestRunner.addTestListener(resultPrinter);
            mTestRunner.setPerformanceResultsWriter(resultPrinter);
        }
        start();
    }

    /**
     * Extra suite requirements contributed by subclasses; empty by default.
     */
    List<Predicate<TestMethod>> getBuilderRequirements() {
        return new ArrayList<Predicate<TestMethod>>();
    }

    /**
     * Parses and loads the specified set of test classes
     *
     * @param testClassArg - comma-separated list of test classes and methods
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) {
        String[] testClasses = testClassArg.split(",");
        for (String testClass : testClasses) {
            parseTestClass(testClass, testSuiteBuilder);
        }
    }

    /**
     * Parse and load the given test class and, optionally, method
     *
     * @param testClassName - full package name of test class and optionally method to add.
     *        Expected format: com.android.TestClass#testMethod
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
        int methodSeparatorIndex = testClassName.indexOf('#');
        String testMethodName = null;

        if (methodSeparatorIndex > 0) {
            // Split "com.android.TestClass#testMethod" into class and method parts.
            testMethodName = testClassName.substring(methodSeparatorIndex + 1);
            testClassName = testClassName.substring(0, methodSeparatorIndex);
        }
        testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext());
    }

    /**
     * Factory for the underlying runner; overridable by subclasses for testing.
     */
    protected AndroidTestRunner getAndroidTestRunner() {
        return new AndroidTestRunner();
    }

    /**
     * Returns true only if the argument is present and equals (ignoring case) "true".
     */
    private boolean getBooleanArgument(Bundle arguments, String tag) {
        String tagString = arguments.getString(tag);
        return tagString != null && Boolean.parseBoolean(tagString);
    }

    /*
     * Returns the size predicate object, corresponding to the "size" argument value,
     * or null if the argument is absent or unrecognized.
     */
    private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {

        if (SMALL_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_SMALL;
        } else if (MEDIUM_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_MEDIUM;
        } else if (LARGE_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_LARGE;
        } else {
            return null;
        }
    }

    /**
     * Returns the test predicate object, corresponding to the annotation class value provided via
     * the {@link #ARGUMENT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return new HasAnnotation(annotationClass);
        }
        return null;
    }

    /**
     * Returns the negative test predicate object, corresponding to the annotation class value
     * provided via the {@link #ARGUMENT_NOT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getNotAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return Predicates.not(new HasAnnotation(annotationClass));
        }
        return null;
    }

    /**
     * Helper method to return the annotation class with specified name
     *
     * @param annotationClassName the fully qualified name of the class
     * @return the annotation class or <code>null</code>
     */
    private Class<? extends Annotation> getAnnotationClass(String annotationClassName) {
        if (annotationClassName == null) {
            return null;
        }
        try {
            Class<?> annotationClass = Class.forName(annotationClassName);
            if (annotationClass.isAnnotation()) {
                // unchecked cast is guarded by the isAnnotation() check above
                return (Class<? extends Annotation>)annotationClass;
            } else {
                Log.e(LOG_TAG, String.format("Provided annotation value %s is not an Annotation",
                        annotationClassName));
            }
        } catch (ClassNotFoundException e) {
            Log.e(LOG_TAG, String.format("Could not find class for specified annotation %s",
                    annotationClassName));
        }
        return null;
    }

    /**
     * Initialize the current thread as a looper.
     * <p/>
     * Exposed for unit testing.
     */
    void prepareLooper() {
        Looper.prepare();
    }

    /**
     * Runs the configured test suite (or just reports the test count in "-e count true" mode)
     * and delivers the accumulated results via {@link #finish}.
     */
    @Override
    public void onStart() {
        prepareLooper();

        if (mJustCount) {
            // Count-only mode: report the total and exit without running anything.
            mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
            finish(Activity.RESULT_OK, mResults);
        } else {
            if (mDebug) {
                Debug.waitForDebugger();
            }

            // Capture the textual run summary in memory so it can be attached to the results.
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            PrintStream writer = new PrintStream(byteArrayOutputStream);
            try {
                StringResultPrinter resultPrinter = new StringResultPrinter(writer);

                mTestRunner.addTestListener(resultPrinter);

                long startTime = System.currentTimeMillis();
                mTestRunner.runTest();
                long runTime = System.currentTimeMillis() - startTime;

                resultPrinter.print(mTestRunner.getTestResult(), runTime);
            } catch (Throwable t) {
                // catch all exceptions so a more verbose error message can be outputted
                writer.println(String.format("Test run aborted due to unexpected exception: %s",
                        t.getMessage()));
                t.printStackTrace(writer);
            } finally {
                mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\nTest results for %s=%s",
                        mTestRunner.getTestClassName(),
                        byteArrayOutputStream.toString()));

                if (mCoverage) {
                    generateCoverageReport();
                }
                writer.close();

                finish(Activity.RESULT_OK, mResults);
            }
        }
    }

    public TestSuite getTestSuite() {
        return getAllTests();
    }

    /**
     * Override this to define all of the tests to run in your package.
     */
    public TestSuite getAllTests() {
        return null;
    }

    /**
     * Override this to provide access to the class loader of your package.
     */
    public ClassLoader getLoader() {
        return null;
    }

    /**
     * Dumps EMMA coverage data to {@link #getCoverageFilePath()} and records the path (plus a
     * user-friendly message) in the result bundle. Any reflection failure is reported as an
     * error in the result stream rather than thrown.
     */
    private void generateCoverageReport() {
        // use reflection to call emma dump coverage method, to avoid
        // always statically compiling against emma jar
        String coverageFilePath = getCoverageFilePath();
        java.io.File coverageFile = new java.io.File(coverageFilePath);
        try {
            Class<?> emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
            Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
                    coverageFile.getClass(), boolean.class, boolean.class);

            dumpCoverageMethod.invoke(null, coverageFile, false, false);
            // output path to generated coverage file so it can be parsed by a test harness if
            // needed
            mResults.putString(REPORT_KEY_COVERAGE_PATH, coverageFilePath);
            // also output a more user friendly msg
            final String currentStream = mResults.getString(
                    Instrumentation.REPORT_KEY_STREAMRESULT);
            mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    String.format("%s\nGenerated code coverage data to %s", currentStream,
                    coverageFilePath));
        } catch (ClassNotFoundException e) {
            reportEmmaError("Is emma jar on classpath?", e);
        } catch (SecurityException e) {
            reportEmmaError(e);
        } catch (NoSuchMethodException e) {
            reportEmmaError(e);
        } catch (IllegalArgumentException e) {
            reportEmmaError(e);
        } catch (IllegalAccessException e) {
            reportEmmaError(e);
        } catch (InvocationTargetException e) {
            reportEmmaError(e);
        }
    }

    /**
     * Returns the coverage output path: the "coverageFile" argument if supplied, otherwise
     * <files-dir>/coverage.ec in the target application.
     */
    private String getCoverageFilePath() {
        if (mCoverageFilePath == null) {
            return getTargetContext().getFilesDir().getAbsolutePath() + File.separator +
                   DEFAULT_COVERAGE_FILE_NAME;
        } else {
            return mCoverageFilePath;
        }
    }

    private void reportEmmaError(Exception e) {
        reportEmmaError("", e);
    }

    private void reportEmmaError(String hint, Exception e) {
        String msg = "Failed to generate emma coverage. " + hint;
        Log.e(LOG_TAG, msg, e);
        mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
    }

    // TODO kill this, use status() and prettyprint model for better output
    private class StringResultPrinter extends ResultPrinter {

        public StringResultPrinter(PrintStream writer) {
            super(writer);
        }

        synchronized void print(TestResult result, long runTime) {
            printHeader(runTime);
            printFooter(result);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher about
     * which suite each test belongs.
     */
    private class SuiteAssignmentPrinter implements TestListener {

        private Bundle mTestResult;
        private long mStartTime;
        private long mEndTime;
        private boolean mTimingValid;   // cleared if the test errors/fails, invalidating timing

        public SuiteAssignmentPrinter() {
        }

        /**
         * send a status for the start of each test, so long tests can be seen as "running"
         */
        public void startTest(Test test) {
            mTimingValid = true;
            mStartTime = System.currentTimeMillis();
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            float runTime;
            String assignmentSuite;
            mEndTime = System.currentTimeMillis();
            mTestResult = new Bundle();

            if (!mTimingValid || mStartTime < 0) {
                // Timing invalidated by an error/failure, or startTest was never called.
                assignmentSuite = "NA";
                runTime = -1;
            } else {
                runTime = mEndTime - mStartTime;
                // InstrumentationTestCase subclasses are never assigned to the small suite.
                if (runTime < SMALL_SUITE_MAX_RUNTIME
                        && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
                    assignmentSuite = SMALL_SUITE;
                } else if (runTime <
                           MEDIUM_SUITE_MAX_RUNTIME) {
                    assignmentSuite = MEDIUM_SUITE;
                } else {
                    assignmentSuite = LARGE_SUITE;
                }
            }
            // Clear mStartTime so that we can verify that it gets set next time.
            mStartTime = -1;

            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    test.getClass().getName() + "#" + ((TestCase) test).getName()
                    + "\nin " + assignmentSuite + " suite\nrunTime: "
                    + String.valueOf(runTime) + "\n");
            mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
            mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);

            sendStatus(0, mTestResult);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher
     */
    private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
        private final Bundle mResultTemplate;   // common fields copied into every per-test bundle
        Bundle mTestResult;
        int mTestNum = 0;
        int mTestResultCode = 0;                // 0 (ok) until addError/addFailure overrides it
        String mTestClass = null;               // last reported class, for pretty-printed headers
        PerformanceCollector mPerfCollector = new PerformanceCollector();
        boolean mIsTimedTest = false;
        boolean mIncludeDetailedStats = false;

        public WatcherResultPrinter(int numTests) {
            mResultTemplate = new Bundle();
            mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
        }

        /**
         * send a status for the start of each test, so long tests can be seen
         * as "running"
         */
        public void startTest(Test test) {
            String testClass = test.getClass().getName();
            String testName = ((TestCase)test).getName();
            mTestResult = new Bundle(mResultTemplate);
            mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
            mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
            mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
            // pretty printing: emit a class header only when the class changes
            if (testClass != null && !testClass.equals(mTestClass)) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\n%s:", testClass));
                mTestClass = testClass;
            } else {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
            }

            Method testMethod = null;
            try {
                testMethod = test.getClass().getMethod(testName);
                // Report total number of iterations, if test is repetitive
                if (testMethod.isAnnotationPresent(RepetitiveTest.class)) {
                    int numIterations = testMethod.getAnnotation(
                            RepetitiveTest.class).numIterations();
                    mTestResult.putInt(REPORT_KEY_NUM_ITERATIONS, numIterations);
                }
            } catch (NoSuchMethodException e) {
                // ignore - the test with given name does not exist. Will be handled during test
                // execution
            }

            // The delay_msec parameter is normally used to provide buffers of idle time
            // for power measurement purposes. To make sure there is a delay before and after
            // every test in a suite, we delay *after* every test (see endTest below) and also
            // delay *before* the first test. So, delay test1 delay test2 delay.

            try {
                if (mTestNum == 1) Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }

            sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
            mTestResultCode = 0;

            mIsTimedTest = false;
            mIncludeDetailedStats = false;
            try {
                // Look for TimedTest annotation on both test class and test method
                if (testMethod != null && testMethod.isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = testMethod.getAnnotation(
                            TimedTest.class).includeDetailedStats();
                } else if (test.getClass().isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = test.getClass().getAnnotation(
                            TimedTest.class).includeDetailedStats();
                }
            } catch (SecurityException e) {
                // ignore - the test with given name cannot be accessed. Will be handled during
                // test execution
            }

            if (mIsTimedTest && mIncludeDetailedStats) {
                mPerfCollector.beginSnapshot("");
            } else if (mIsTimedTest) {
                mPerfCollector.startTiming("");
            }
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_ERROR;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    String.format("\nError in %s:\n%s",
                    ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    String.format("\nFailure in %s:\n%s",
                    ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            if (mIsTimedTest && mIncludeDetailedStats) {
                mTestResult.putAll(mPerfCollector.endSnapshot());
            } else if (mIsTimedTest) {
                writeStopTiming(mPerfCollector.stopTiming(""));
            }

            if (mTestResultCode == 0) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
            }
            sendStatus(mTestResultCode, mTestResult);

            try { // Sleep after every test, if specified
                Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }
        }

        public void writeBeginSnapshot(String label) {
            // Do nothing
        }

        public void writeEndSnapshot(Bundle results) {
            // Copy all snapshot data fields into mResults, which is outputted
            // via Instrumentation.finish
            mResults.putAll(results);
        }

        public void writeStartTiming(String label) {
            // Do nothing
        }

        public void writeStopTiming(Bundle results) {
            // Copy results into mTestResult by flattening list of iterations,
            // which is outputted via WatcherResultPrinter.endTest
            int i = 0;
            for (Parcelable p :
                    results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
                Bundle iteration = (Bundle)p;
                String index = "iteration" + i + ".";
                mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
                        iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
                i++;
            }
        }

        public void writeMeasurement(String label, long value) {
            mTestResult.putLong(label, value);
        }

        public void writeMeasurement(String label, float value) {
            mTestResult.putFloat(label, value);
        }

        public void writeMeasurement(String label, String value) {
            mTestResult.putString(label, value);
        }

        // TODO report the end of the cycle
    }
}