/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.test;

import static android.test.suitebuilder.TestPredicates.REJECT_PERFORMANCE;

import com.android.internal.util.Predicate;
import com.android.internal.util.Predicates;

import android.app.Activity;
import android.app.Instrumentation;
import android.os.Bundle;
import android.os.Debug;
import android.os.Looper;
import android.os.Parcelable;
import android.os.PerformanceCollector;
import android.os.PerformanceCollector.PerformanceResultsWriter;
import android.test.suitebuilder.TestMethod;
import android.test.suitebuilder.TestPredicates;
import android.test.suitebuilder.TestSuiteBuilder;
import android.test.suitebuilder.annotation.HasAnnotation;
import android.util.Log;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

import junit.framework.AssertionFailedError;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestListener;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import junit.runner.BaseTestRunner;
import junit.textui.ResultPrinter;

/**
 * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
 * an Android package (application). Typical usage:
 * <ol>
 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
 * against the classes in your package.  Typically these are subclassed from:
 *   <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li>
 *   <li>{@link android.test.ActivityUnitTestCase}</li>
 *   <li>{@link android.test.AndroidTestCase}</li>
 *   <li>{@link android.test.ApplicationTestCase}</li>
 *   <li>{@link android.test.InstrumentationTestCase}</li>
 *   <li>{@link android.test.ProviderTestCase}</li>
 *   <li>{@link android.test.ServiceTestCase}</li>
 *   <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul>
 * <li>In an appropriate AndroidManifest.xml, define this instrumentation with
 * the appropriate android:targetPackage set.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with no optional arguments, to run all tests (except performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e func true' to run all functional tests. These are tests that derive from
 * {@link android.test.InstrumentationTestCase}.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive
 * from {@link android.test.InstrumentationTestCase} (and are not performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}.
 * </ol>
 * <p/>
 * <b>Running all tests:</b> adb shell am instrument -w
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all small tests:</b> adb shell am instrument -w
 * -e size small
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all medium tests:</b> adb shell am instrument -w
 * -e size medium
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all large tests:</b> adb shell am instrument -w
 * -e size large
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Filter test run to tests with given annotation:</b> adb shell am instrument -w
 * -e annotation com.android.foo.MyAnnotation
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * If used with other options, the resulting test run will contain the intersection of the two
 * options. e.g. "-e size large -e annotation com.android.foo.MyAnnotation" will run only tests
 * with both the {@link LargeTest} and "com.android.foo.MyAnnotation" annotations.
 * <p/>
 * <b>Filter test run to tests <i>without</i> given annotation:</b> adb shell am instrument -w
 * -e notAnnotation com.android.foo.MyAnnotation
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single test class:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single test:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest#testFoo
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running multiple tests:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest,com.android.foo.TooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Including performance tests:</b> adb shell am instrument -w
 * -e perf true
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>To debug your tests, set a breakpoint in your code and pass:</b>
 * -e debug true
 * <p/>
 * <b>To run in 'log only' mode:</b>
 * -e log true
 * This option will load and iterate through all test classes and methods, but will bypass actual
 * test execution. Useful for quickly obtaining info on the tests to be executed by an
 * instrumentation command.
 * <p/>
 * <b>To generate EMMA code coverage:</b>
 * -e coverage true
 * Note: this requires an EMMA-instrumented build. By default, the code coverage results file
 * will be saved under the application's files directory (/data/data/<app>/files/coverage.ec),
 * unless overridden by the coverageFile flag (see below)
 * <p/>
 * <b>To specify EMMA code coverage results file path:</b>
 * -e coverageFile /sdcard/myFile.ec
 * <br/>
 * in addition to the other arguments.
 */

/* (not JavaDoc)
 * Although not necessary in most cases, another way to use this class is to extend it and have the
 * derived class return the desired test suite from the {@link #getTestSuite()} method. The test
 * suite returned from this method will be used if no target class is defined in the meta-data or
 * command-line arguments. If a derived class is used, it needs to be added as an
 * instrumentation to the AndroidManifest.xml and the command to run it would look like:
 * <p/>
 * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
 * <p/>
 * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class.
 *
 * This model is used by many existing app tests, but can probably be deprecated.
 */
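
/* (not JavaDoc)
 * A minimal sketch of such a derived runner, for illustration only; the runner and test class
 * names below are hypothetical. {@link #getAllTests()} is the documented override point and is
 * what {@link #getTestSuite()} returns by default:
 *
 *   public class FooInstrumentationTestRunner extends InstrumentationTestRunner {
 *       @Override
 *       public TestSuite getAllTests() {
 *           TestSuite suite = new TestSuite();
 *           suite.addTestSuite(FooTest.class);
 *           return suite;
 *       }
 *   }
 */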
public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider {

    /** @hide */
    public static final String ARGUMENT_TEST_CLASS = "class";
    /** @hide */
    public static final String ARGUMENT_TEST_PACKAGE = "package";
    /** @hide */
    public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size";
    /** @hide */
    public static final String ARGUMENT_INCLUDE_PERF = "perf";
    /** @hide */
    public static final String ARGUMENT_DELAY_MSEC = "delay_msec";

    private static final String SMALL_SUITE = "small";
    private static final String MEDIUM_SUITE = "medium";
    private static final String LARGE_SUITE = "large";

    private static final String ARGUMENT_LOG_ONLY = "log";
    /** @hide */
    static final String ARGUMENT_ANNOTATION = "annotation";
    /** @hide */
    static final String ARGUMENT_NOT_ANNOTATION = "notAnnotation";

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the "small"
     * suite. It is used to make an educated guess about which suite an unlabeled test belongs to.
     */
    private static final float SMALL_SUITE_MAX_RUNTIME = 100;

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the
     * "medium" suite. It is used to make an educated guess about which suite an unlabeled test
     * belongs to.
     */
    private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;

    /**
     * The following keys are used in the status bundle to provide structured reports to
     * an IInstrumentationWatcher.
     */

    /**
     * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
     * identifies InstrumentationTestRunner as the source of the report.  This is sent with all
     * status messages.
     */
    public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the total number of tests that are being run.  This is sent with all status
     * messages.
     */
    public static final String REPORT_KEY_NUM_TOTAL = "numtests";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the sequence number of the current test.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NUM_CURRENT = "current";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test class.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_CLASS = "class";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_TEST = "test";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the run time (in milliseconds) of the current test.
     */
    private static final String REPORT_KEY_RUN_TIME = "runtime";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the guessed suite assignment for the current test.
     */
    private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the path to the generated code coverage file.
     */
    private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath";

    /**
     * The test is starting.
     */
    public static final int REPORT_VALUE_RESULT_START = 1;
    /**
     * The test completed successfully.
     */
    public static final int REPORT_VALUE_RESULT_OK = 0;
    /**
     * The test completed with an error.
     */
    public static final int REPORT_VALUE_RESULT_ERROR = -1;
    /**
     * The test completed with a failure.
     */
    public static final int REPORT_VALUE_RESULT_FAILURE = -2;
    /**
     * If included in the status bundle sent to an IInstrumentationWatcher, this key
     * identifies a stack trace describing an error or failure.  This is sent with any status
     * message describing a specific test being completed.
     */
    public static final String REPORT_KEY_STACK = "stack";

    // Default file name for code coverage
    private static final String DEFAULT_COVERAGE_FILE_NAME = "coverage.ec";

    private static final String LOG_TAG = "InstrumentationTestRunner";

    private final Bundle mResults = new Bundle();
    private AndroidTestRunner mTestRunner;
    private boolean mDebug;
    private boolean mJustCount;
    private boolean mSuiteAssignmentMode;
    private int mTestCount;
    private String mPackageOfTests;
    private boolean mCoverage;
    private String mCoverageFilePath;
    private int mDelayMsec;

    @Override
    public void onCreate(Bundle arguments) {
        super.onCreate(arguments);

        // Apk paths used to search for test classes when using TestSuiteBuilders.
        String[] apkPaths =
                {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()};
        ClassPathPackageInfoSource.setApkPaths(apkPaths);

        Predicate<TestMethod> testSizePredicate = null;
        Predicate<TestMethod> testAnnotationPredicate = null;
        Predicate<TestMethod> testNotAnnotationPredicate = null;
        boolean includePerformance = false;
        String testClassesArg = null;
        boolean logOnly = false;

        if (arguments != null) {
            // Test class name passed as an argument should override any meta-data declaration.
            testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS);
            mDebug = getBooleanArgument(arguments, "debug");
            mJustCount = getBooleanArgument(arguments, "count");
            mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment");
            mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE);
            testSizePredicate = getSizePredicateFromArg(
                    arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE));
            testAnnotationPredicate = getAnnotationPredicate(
                    arguments.getString(ARGUMENT_ANNOTATION));
            testNotAnnotationPredicate = getNotAnnotationPredicate(
                    arguments.getString(ARGUMENT_NOT_ANNOTATION));

            includePerformance = getBooleanArgument(arguments, ARGUMENT_INCLUDE_PERF);
            logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY);
            mCoverage = getBooleanArgument(arguments, "coverage");
            mCoverageFilePath = arguments.getString("coverageFile");

            try {
                Object delay = arguments.get(ARGUMENT_DELAY_MSEC);  // Accept either string or int
                if (delay != null) mDelayMsec = Integer.parseInt(delay.toString());
            } catch (NumberFormatException e) {
                Log.e(LOG_TAG, "Invalid delay_msec parameter", e);
            }
        }

        TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(),
                getTargetContext().getClassLoader());

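        // Every requirement added below must be satisfied for a test to be included in the
        // suite, so combining filters narrows the run (an intersection, not a union).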
        if (testSizePredicate != null) {
            testSuiteBuilder.addRequirements(testSizePredicate);
        }
        if (testAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testAnnotationPredicate);
        }
        if (testNotAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testNotAnnotationPredicate);
        }
        if (!includePerformance) {
            testSuiteBuilder.addRequirements(REJECT_PERFORMANCE);
        }

        if (testClassesArg == null) {
            if (mPackageOfTests != null) {
                testSuiteBuilder.includePackages(mPackageOfTests);
            } else {
                TestSuite testSuite = getTestSuite();
                if (testSuite != null) {
                    testSuiteBuilder.addTestSuite(testSuite);
                } else {
                    // No package or class arguments were supplied, and no test suite was
                    // provided, so add all tests in the application.
                    testSuiteBuilder.includePackages("");
                }
            }
        } else {
            parseTestClasses(testClassesArg, testSuiteBuilder);
        }

        testSuiteBuilder.addRequirements(getBuilderRequirements());

        mTestRunner = getAndroidTestRunner();
        mTestRunner.setContext(getTargetContext());
        mTestRunner.setInstrumentation(this);
        mTestRunner.setSkipExecution(logOnly);
        mTestRunner.setTest(testSuiteBuilder.build());
        mTestCount = mTestRunner.getTestCases().size();
        if (mSuiteAssignmentMode) {
            mTestRunner.addTestListener(new SuiteAssignmentPrinter());
        } else {
            WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount);
            mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
            mTestRunner.addTestListener(resultPrinter);
            mTestRunner.setPerformanceResultsWriter(resultPrinter);
        }
        start();
    }

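    /**
     * Hook that supplies additional requirements applied when building the test suite; the
     * default implementation returns an empty list.
     */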
    List<Predicate<TestMethod>> getBuilderRequirements() {
        return new ArrayList<Predicate<TestMethod>>();
    }

    /**
     * Parses and loads the specified set of test classes
     *
     * @param testClassArg - comma-separated list of test classes and methods
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) {
        String[] testClasses = testClassArg.split(",");
        for (String testClass : testClasses) {
            parseTestClass(testClass, testSuiteBuilder);
        }
    }

    /**
     * Parse and load the given test class and, optionally, method
     *
     * @param testClassName - full package name of test class and optionally method to add.
     *        Expected format: com.android.TestClass#testMethod
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
        int methodSeparatorIndex = testClassName.indexOf('#');
        String testMethodName = null;

        if (methodSeparatorIndex > 0) {
            testMethodName = testClassName.substring(methodSeparatorIndex + 1);
            testClassName = testClassName.substring(0, methodSeparatorIndex);
        }
        testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext());
    }

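    /**
     * Returns the {@link AndroidTestRunner} that will run the tests. The method is protected, so
     * a subclass can override it to supply a customized runner.
     */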
    protected AndroidTestRunner getAndroidTestRunner() {
        return new AndroidTestRunner();
    }

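    /**
     * Reads an optional boolean argument from the instrumentation bundle. A missing value, or
     * any value other than "true" (ignoring case), is treated as false.
     */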
    private boolean getBooleanArgument(Bundle arguments, String tag) {
        String tagString = arguments.getString(tag);
        return tagString != null && Boolean.parseBoolean(tagString);
    }

    /*
     * Returns the size predicate object corresponding to the "size" argument value.
     */
    private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {

        if (SMALL_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_SMALL;
        } else if (MEDIUM_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_MEDIUM;
        } else if (LARGE_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_LARGE;
        } else {
            return null;
        }
    }

    /**
     * Returns the test predicate object corresponding to the annotation class value provided
     * via the {@link #ARGUMENT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return new HasAnnotation(annotationClass);
        }
        return null;
    }

    /**
     * Returns the negative test predicate object corresponding to the annotation class value
     * provided via the {@link #ARGUMENT_NOT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getNotAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return Predicates.not(new HasAnnotation(annotationClass));
        }
        return null;
    }

    /**
     * Helper method to return the annotation class with the specified name
     *
     * @param annotationClassName the fully qualified name of the class
     * @return the annotation class or <code>null</code>
     */
    private Class<? extends Annotation> getAnnotationClass(String annotationClassName) {
        if (annotationClassName == null) {
            return null;
        }
        try {
            Class<?> annotationClass = Class.forName(annotationClassName);
            if (annotationClass.isAnnotation()) {
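                // The cast is safe: isAnnotation() has just confirmed that this class is an
                // annotation type.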
                return (Class<? extends Annotation>) annotationClass;
            } else {
                Log.e(LOG_TAG, String.format("Provided annotation value %s is not an Annotation",
                        annotationClassName));
            }
        } catch (ClassNotFoundException e) {
            Log.e(LOG_TAG, String.format("Could not find class for specified annotation %s",
                    annotationClassName));
        }
        return null;
    }

    @Override
    public void onStart() {
        Looper.prepare();

        if (mJustCount) {
            mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
            finish(Activity.RESULT_OK, mResults);
        } else {
            if (mDebug) {
                Debug.waitForDebugger();
            }

            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            PrintStream writer = new PrintStream(byteArrayOutputStream);
            try {
                StringResultPrinter resultPrinter = new StringResultPrinter(writer);

                mTestRunner.addTestListener(resultPrinter);

                long startTime = System.currentTimeMillis();
                mTestRunner.runTest();
                long runTime = System.currentTimeMillis() - startTime;

                resultPrinter.print(mTestRunner.getTestResult(), runTime);
            } finally {
                mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\nTest results for %s=%s",
                        mTestRunner.getTestClassName(),
                        byteArrayOutputStream.toString()));

                if (mCoverage) {
                    generateCoverageReport();
                }
                writer.close();

                finish(Activity.RESULT_OK, mResults);
            }
        }
    }

    public TestSuite getTestSuite() {
        return getAllTests();
    }

    /**
     * Override this to define all of the tests to run in your package.
     */
    public TestSuite getAllTests() {
        return null;
    }

    /**
     * Override this to provide access to the class loader of your package.
     */
    public ClassLoader getLoader() {
        return null;
    }

    private void generateCoverageReport() {
        // Use reflection to call the emma dump coverage method, to avoid always statically
        // compiling against the emma jar.
        String coverageFilePath = getCoverageFilePath();
        File coverageFile = new File(coverageFilePath);
        try {
            Class<?> emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
            Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
                    File.class, boolean.class, boolean.class);

            dumpCoverageMethod.invoke(null, coverageFile, false, false);
            // Output the path to the generated coverage file so it can be parsed by a test
            // harness if needed.
            mResults.putString(REPORT_KEY_COVERAGE_PATH, coverageFilePath);
            // Also output a more user-friendly message.
            final String currentStream = mResults.getString(
                    Instrumentation.REPORT_KEY_STREAMRESULT);
            mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("%s\nGenerated code coverage data to %s", currentStream,
                coverageFilePath));
        } catch (ClassNotFoundException e) {
            reportEmmaError("Is emma jar on classpath?", e);
        } catch (SecurityException e) {
            reportEmmaError(e);
        } catch (NoSuchMethodException e) {
            reportEmmaError(e);
        } catch (IllegalArgumentException e) {
            reportEmmaError(e);
        } catch (IllegalAccessException e) {
            reportEmmaError(e);
        } catch (InvocationTargetException e) {
            reportEmmaError(e);
        }
    }

    private String getCoverageFilePath() {
        if (mCoverageFilePath == null) {
            return getTargetContext().getFilesDir().getAbsolutePath() + File.separator +
                   DEFAULT_COVERAGE_FILE_NAME;
        } else {
            return mCoverageFilePath;
        }
    }

    private void reportEmmaError(Exception e) {
        reportEmmaError("", e);
    }

    private void reportEmmaError(String hint, Exception e) {
        String msg = "Failed to generate emma coverage. " + hint;
        Log.e(LOG_TAG, msg, e);
        mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
    }

    // TODO kill this, use status() and prettyprint model for better output
    private class StringResultPrinter extends ResultPrinter {

        public StringResultPrinter(PrintStream writer) {
            super(writer);
        }

        synchronized void print(TestResult result, long runTime) {
            printHeader(runTime);
            printFooter(result);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher about
     * which suite each test belongs to.
     */
    private class SuiteAssignmentPrinter implements TestListener {

        private Bundle mTestResult;
        private long mStartTime;
        private long mEndTime;
        private boolean mTimingValid;

        public SuiteAssignmentPrinter() {
        }

        /**
         * Send a status for the start of each test, so long tests can be seen as "running".
         */
        public void startTest(Test test) {
            mTimingValid = true;
            mStartTime = System.currentTimeMillis();
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            float runTime;
            String assignmentSuite;
            mEndTime = System.currentTimeMillis();
            mTestResult = new Bundle();

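            // Guess the suite from the measured wall-clock time. Tests that extend
            // InstrumentationTestCase are never assigned to the small suite, however quickly
            // they ran.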
            if (!mTimingValid || mStartTime < 0) {
                assignmentSuite = "NA";
                runTime = -1;
            } else {
                runTime = mEndTime - mStartTime;
                if (runTime < SMALL_SUITE_MAX_RUNTIME
                        && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
                    assignmentSuite = SMALL_SUITE;
                } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
                    assignmentSuite = MEDIUM_SUITE;
                } else {
                    assignmentSuite = LARGE_SUITE;
                }
            }
            // Clear mStartTime so that we can verify that it gets set next time.
            mStartTime = -1;

            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    test.getClass().getName() + "#" + ((TestCase) test).getName()
                    + "\nin " + assignmentSuite + " suite\nrunTime: "
                    + String.valueOf(runTime) + "\n");
            mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
            mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);

            sendStatus(0, mTestResult);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher.
     */
    private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
        private final Bundle mResultTemplate;
        Bundle mTestResult;
        int mTestNum = 0;
        int mTestResultCode = 0;
        String mTestClass = null;
        PerformanceCollector mPerfCollector = new PerformanceCollector();
        boolean mIsTimedTest = false;
        boolean mIncludeDetailedStats = false;

        public WatcherResultPrinter(int numTests) {
            mResultTemplate = new Bundle();
            mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
        }

        /**
         * Send a status for the start of each test, so long tests can be seen
         * as "running".
         */
        public void startTest(Test test) {
            String testClass = test.getClass().getName();
            String testName = ((TestCase)test).getName();
            mTestResult = new Bundle(mResultTemplate);
            mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
            mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
            mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
            // pretty printing
            if (testClass != null && !testClass.equals(mTestClass)) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\n%s:", testClass));
                mTestClass = testClass;
            } else {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
            }

            // The delay_msec parameter is normally used to provide buffers of idle time
            // for power measurement purposes. To make sure there is a delay before and after
            // every test in a suite, we delay *after* every test (see endTest below) and also
            // delay *before* the first test. So, delay test1 delay test2 delay.

            try {
                if (mTestNum == 1) Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }

            sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
            mTestResultCode = 0;

            mIsTimedTest = false;
            mIncludeDetailedStats = false;
            try {
                // Look for TimedTest annotation on both test class and test method
                if (test.getClass().getMethod(testName).isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = test.getClass().getMethod(testName).getAnnotation(
                            TimedTest.class).includeDetailedStats();
                } else if (test.getClass().isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = test.getClass().getAnnotation(
                            TimedTest.class).includeDetailedStats();
                }
            } catch (SecurityException e) {
                throw new IllegalStateException(e);
            } catch (NoSuchMethodException e) {
                throw new IllegalStateException(e);
            }

            if (mIsTimedTest && mIncludeDetailedStats) {
                mPerfCollector.beginSnapshot("");
            } else if (mIsTimedTest) {
                mPerfCollector.startTiming("");
            }
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_ERROR;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("\nError in %s:\n%s",
                    ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("\nFailure in %s:\n%s",
                    ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            if (mIsTimedTest && mIncludeDetailedStats) {
                mTestResult.putAll(mPerfCollector.endSnapshot());
            } else if (mIsTimedTest) {
                writeStopTiming(mPerfCollector.stopTiming(""));
            }

            if (mTestResultCode == 0) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
            }
            sendStatus(mTestResultCode, mTestResult);

            try { // Sleep after every test, if specified
                Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }
        }

        public void writeBeginSnapshot(String label) {
            // Do nothing
        }

        public void writeEndSnapshot(Bundle results) {
            // Copy all snapshot data fields into mResults, which is reported via
            // Instrumentation.finish.
            mResults.putAll(results);
        }

        public void writeStartTiming(String label) {
            // Do nothing
        }

        public void writeStopTiming(Bundle results) {
            // Copy results into mTestResult by flattening the list of iterations; mTestResult
            // is reported via WatcherResultPrinter.endTest.
            int i = 0;
            for (Parcelable p :
                    results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
                Bundle iteration = (Bundle)p;
                String index = "iteration" + i + ".";
                mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
                        iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
                i++;
            }
        }

        public void writeMeasurement(String label, long value) {
            mTestResult.putLong(label, value);
        }

        public void writeMeasurement(String label, float value) {
            mTestResult.putFloat(label, value);
        }

        public void writeMeasurement(String label, String value) {
            mTestResult.putString(label, value);
        }

        // TODO report the end of the cycle
    }
}