InstrumentationTestRunner.java revision 48983959a34f67024bd0411c8353c196e6a87717
/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.test;

import static android.test.suitebuilder.TestPredicates.REJECT_PERFORMANCE;

import com.android.internal.util.Predicate;
import com.android.internal.util.Predicates;

import android.app.Activity;
import android.app.Instrumentation;
import android.os.Bundle;
import android.os.Debug;
import android.os.Looper;
import android.os.Parcelable;
import android.os.PerformanceCollector;
import android.os.PerformanceCollector.PerformanceResultsWriter;
import android.test.suitebuilder.TestMethod;
import android.test.suitebuilder.TestPredicates;
import android.test.suitebuilder.TestSuiteBuilder;
import android.test.suitebuilder.annotation.HasAnnotation;
import android.test.suitebuilder.annotation.LargeTest;
import android.util.Log;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

import junit.framework.AssertionFailedError;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestListener;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import junit.runner.BaseTestRunner;
import junit.textui.ResultPrinter;

/**
 * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
 * an Android package (application). Typical usage:
 * <ol>
 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
 * against the classes in your package.  Typically these are subclassed from:
 *   <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li>
 *   <li>{@link android.test.ActivityUnitTestCase}</li>
 *   <li>{@link android.test.AndroidTestCase}</li>
 *   <li>{@link android.test.ApplicationTestCase}</li>
 *   <li>{@link android.test.InstrumentationTestCase}</li>
 *   <li>{@link android.test.ProviderTestCase}</li>
 *   <li>{@link android.test.ServiceTestCase}</li>
 *   <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul>
 *   <li>In an appropriate AndroidManifest.xml, define this instrumentation with the
 * appropriate android:targetPackage set (see the sample declaration after this list).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with no optional arguments, to run all tests (except performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e func true' to run all functional tests. These are tests that derive from
 * {@link android.test.InstrumentationTestCase}.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive
 * from {@link android.test.InstrumentationTestCase} (and are not performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}.
 * </ol>
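 * <p/>
 * A minimal AndroidManifest.xml declaration, sketched with the placeholder package name used in
 * the examples below (adjust the name, label, and target package for your application):
 * <pre>
 * &lt;instrumentation android:name="android.test.InstrumentationTestRunner"
 *     android:targetPackage="com.android.foo"
 *     android:label="Tests for com.android.foo" /&gt;
 * </pre>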
 * <p/>
 * <b>Running all tests:</b> adb shell am instrument -w
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all small tests:</b> adb shell am instrument -w
 * -e size small
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all medium tests:</b> adb shell am instrument -w
 * -e size medium
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all large tests:</b> adb shell am instrument -w
 * -e size large
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Filter test run to tests with given annotation:</b> adb shell am instrument -w
 * -e annotation com.android.foo.MyAnnotation
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * If used with other options, the resulting test run will contain the intersection of the two filters.
 * e.g. "-e size large -e annotation com.android.foo.MyAnnotation" will run only tests with both
 * the {@link LargeTest} and "com.android.foo.MyAnnotation" annotations.
 * <p/>
 * <b>Filter test run to tests <i>without</i> given annotation:</b> adb shell am instrument -w
 * -e notAnnotation com.android.foo.MyAnnotation
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single testcase:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single test:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest#testFoo
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running multiple tests:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest,com.android.foo.TooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all tests in a java package:</b> adb shell am instrument -w
 * -e package com.android.foo.subpkg
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Including performance tests:</b> adb shell am instrument -w
 * -e perf true
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>To debug your tests, set a break point in your code and pass:</b>
 * -e debug true
 * <p/>
 * <b>To run in 'log only' mode</b>
 * -e log true
 * This option will load and iterate through all test classes and methods, but will bypass actual
 * test execution. Useful for quickly obtaining info on the tests to be executed by an
 * instrumentation command.
 * <p/>
 * <b>To generate EMMA code coverage:</b>
 * -e coverage true
 * Note: this requires an emma instrumented build. By default, the code coverage results file
 * will be saved as coverage.ec in the target application's files directory, unless overridden by
 * the coverageFile flag (see below).
 * <p/>
 * <b>To specify EMMA code coverage results file path:</b>
 * -e coverageFile /sdcard/myFile.ec
 * <br/>
 * in addition to the other arguments.
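 * <p/>
 * For example, a complete command combining the coverage arguments above (using the same
 * placeholder package as the earlier examples): adb shell am instrument -w
 * -e coverage true -e coverageFile /sdcard/myFile.ec
 * com.android.foo/android.test.InstrumentationTestRunner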
 */

/* (not JavaDoc)
 * Although not necessary in most cases, another way to use this class is to extend it and have the
 * derived class return the desired test suite from the {@link #getTestSuite()} method. The test
 * suite returned from this method will be used if no target class is defined in the meta-data or
 * command line argument parameters. If a derived class is used, it needs to be added as an
 * instrumentation to the AndroidManifest.xml and the command to run it would look like:
 * <p/>
 * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
 * <p/>
 * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class.
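 *
 * A minimal sketch of such a derived runner (the class names here are hypothetical); it
 * overrides getAllTests(), which getTestSuite() delegates to:
 *
 *   public class FooInstrumentationTestRunner extends InstrumentationTestRunner {
 *       @Override
 *       public TestSuite getAllTests() {
 *           TestSuite suite = new TestSuite();
 *           suite.addTestSuite(FooActivityTest.class);
 *           suite.addTestSuite(FooServiceTest.class);
 *           return suite;
 *       }
 *   }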
 *
 * This model is used by many existing app tests, but can probably be deprecated.
 */
public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider {

    /** @hide */
    public static final String ARGUMENT_TEST_CLASS = "class";
    /** @hide */
    public static final String ARGUMENT_TEST_PACKAGE = "package";
    /** @hide */
    public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size";
    /** @hide */
    public static final String ARGUMENT_INCLUDE_PERF = "perf";
    /** @hide */
    public static final String ARGUMENT_DELAY_MSEC = "delay_msec";

    private static final String SMALL_SUITE = "small";
    private static final String MEDIUM_SUITE = "medium";
    private static final String LARGE_SUITE = "large";

    private static final String ARGUMENT_LOG_ONLY = "log";
    /** @hide */
    static final String ARGUMENT_ANNOTATION = "annotation";
    /** @hide */
    static final String ARGUMENT_NOT_ANNOTATION = "notAnnotation";

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the "small"
     * suite. It is used to make an educated guess at the suite to which an unlabeled test belongs.
     */
    private static final float SMALL_SUITE_MAX_RUNTIME = 100;

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the
     * "medium" suite. It is used to make an educated guess at the suite to which an unlabeled
     * test belongs.
     */
    private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;

    /**
     * The following keys are used in the status bundle to provide structured reports to
     * an IInstrumentationWatcher.
     */

    /**
     * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
     * identifies InstrumentationTestRunner as the source of the report.  This is sent with all
     * status messages.
     */
    public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the total number of tests that are being run.  This is sent with all status
     * messages.
     */
    public static final String REPORT_KEY_NUM_TOTAL = "numtests";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the sequence number of the current test.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NUM_CURRENT = "current";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test class.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_CLASS = "class";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_TEST = "test";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the run time in milliseconds of the current test.
     */
    private static final String REPORT_KEY_RUN_TIME = "runtime";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the number of total iterations of the current test.
     */
    private static final String REPORT_KEY_NUM_ITERATIONS = "numiterations";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the guessed suite assignment for the current test.
     */
    private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the path to the generated code coverage file.
     */
    private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath";

    /**
     * The test is starting.
     */
    public static final int REPORT_VALUE_RESULT_START = 1;
    /**
     * The test completed successfully.
     */
    public static final int REPORT_VALUE_RESULT_OK = 0;
    /**
     * The test completed with an error.
     */
    public static final int REPORT_VALUE_RESULT_ERROR = -1;
    /**
     * The test completed with a failure.
     */
    public static final int REPORT_VALUE_RESULT_FAILURE = -2;
    /**
     * If included in the status bundle sent to an IInstrumentationWatcher, this key
     * identifies a stack trace describing an error or failure.  This is sent with any status
     * message describing a specific test being completed.
     */
    public static final String REPORT_KEY_STACK = "stack";

    // Default file name for code coverage
    private static final String DEFAULT_COVERAGE_FILE_NAME = "coverage.ec";

    private static final String LOG_TAG = "InstrumentationTestRunner";

    private final Bundle mResults = new Bundle();
    private AndroidTestRunner mTestRunner;
    private boolean mDebug;
    private boolean mJustCount;
    private boolean mSuiteAssignmentMode;
    private int mTestCount;
    private String mPackageOfTests;
    private boolean mCoverage;
    private String mCoverageFilePath;
    private int mDelayMsec;

    @Override
    public void onCreate(Bundle arguments) {
        super.onCreate(arguments);

        // Apk paths used to search for test classes when using TestSuiteBuilders.
        String[] apkPaths =
                {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()};
        ClassPathPackageInfoSource.setApkPaths(apkPaths);

        Predicate<TestMethod> testSizePredicate = null;
        Predicate<TestMethod> testAnnotationPredicate = null;
        Predicate<TestMethod> testNotAnnotationPredicate = null;
        boolean includePerformance = false;
        String testClassesArg = null;
        boolean logOnly = false;

        if (arguments != null) {
            // Test class name passed as an argument should override any meta-data declaration.
            testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS);
            mDebug = getBooleanArgument(arguments, "debug");
            mJustCount = getBooleanArgument(arguments, "count");
            mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment");
            mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE);
            testSizePredicate = getSizePredicateFromArg(
                    arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE));
            testAnnotationPredicate = getAnnotationPredicate(
                    arguments.getString(ARGUMENT_ANNOTATION));
            testNotAnnotationPredicate = getNotAnnotationPredicate(
                    arguments.getString(ARGUMENT_NOT_ANNOTATION));

            includePerformance = getBooleanArgument(arguments, ARGUMENT_INCLUDE_PERF);
            logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY);
            mCoverage = getBooleanArgument(arguments, "coverage");
            mCoverageFilePath = arguments.getString("coverageFile");

            try {
                Object delay = arguments.get(ARGUMENT_DELAY_MSEC);  // Accept either string or int
                if (delay != null) mDelayMsec = Integer.parseInt(delay.toString());
            } catch (NumberFormatException e) {
                Log.e(LOG_TAG, "Invalid delay_msec parameter", e);
            }
        }

        TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(),
                getTargetContext().getClassLoader());

        if (testSizePredicate != null) {
            testSuiteBuilder.addRequirements(testSizePredicate);
        }
        if (testAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testAnnotationPredicate);
        }
        if (testNotAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testNotAnnotationPredicate);
        }
        if (!includePerformance) {
            testSuiteBuilder.addRequirements(REJECT_PERFORMANCE);
        }

        if (testClassesArg == null) {
            if (mPackageOfTests != null) {
                testSuiteBuilder.includePackages(mPackageOfTests);
            } else {
                TestSuite testSuite = getTestSuite();
                if (testSuite != null) {
                    testSuiteBuilder.addTestSuite(testSuite);
                } else {
                    // No package or class arguments were supplied and no test suite was
                    // provided, so include all tests in the application.
                    testSuiteBuilder.includePackages("");
                }
            }
        } else {
            parseTestClasses(testClassesArg, testSuiteBuilder);
        }

        testSuiteBuilder.addRequirements(getBuilderRequirements());

        mTestRunner = getAndroidTestRunner();
        mTestRunner.setContext(getTargetContext());
        mTestRunner.setInstrumentation(this);
        mTestRunner.setSkipExecution(logOnly);
        mTestRunner.setTest(testSuiteBuilder.build());
        mTestCount = mTestRunner.getTestCases().size();
        if (mSuiteAssignmentMode) {
            mTestRunner.addTestListener(new SuiteAssignmentPrinter());
        } else {
            WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount);
            mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
            mTestRunner.addTestListener(resultPrinter);
            mTestRunner.setPerformanceResultsWriter(resultPrinter);
        }
        start();
    }

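    // Additional predicates appended to the suite builder's requirements in onCreate; a test is
    // only included in the run if it satisfies all of them. The default is an empty list.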
    List<Predicate<TestMethod>> getBuilderRequirements() {
        return new ArrayList<Predicate<TestMethod>>();
    }

    /**
     * Parses and loads the specified set of test classes
     *
     * @param testClassArg - comma-separated list of test classes and methods
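     *        (e.g. com.android.foo.FooTest#testFoo,com.android.foo.TooTest)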
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) {
        String[] testClasses = testClassArg.split(",");
        for (String testClass : testClasses) {
            parseTestClass(testClass, testSuiteBuilder);
        }
    }

    /**
     * Parse and load the given test class and, optionally, method
     *
     * @param testClassName - full package name of test class and optionally method to add.
     *        Expected format: com.android.TestClass#testMethod
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
        int methodSeparatorIndex = testClassName.indexOf('#');
        String testMethodName = null;

        if (methodSeparatorIndex > 0) {
            testMethodName = testClassName.substring(methodSeparatorIndex + 1);
            testClassName = testClassName.substring(0, methodSeparatorIndex);
        }
        testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext());
    }

    protected AndroidTestRunner getAndroidTestRunner() {
        return new AndroidTestRunner();
    }

    private boolean getBooleanArgument(Bundle arguments, String tag) {
        String tagString = arguments.getString(tag);
        return tagString != null && Boolean.parseBoolean(tagString);
    }

    /*
     * Returns the size predicate object, corresponding to the "size" argument value.
     */
    private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {

        if (SMALL_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_SMALL;
        } else if (MEDIUM_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_MEDIUM;
        } else if (LARGE_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_LARGE;
        } else {
            return null;
        }
    }

    /**
     * Returns the test predicate object, corresponding to the annotation class value provided via
     * the {@link #ARGUMENT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return new HasAnnotation(annotationClass);
        }
        return null;
    }

    /**
     * Returns the negative test predicate object, corresponding to the annotation class value
     * provided via the {@link #ARGUMENT_NOT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getNotAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return Predicates.not(new HasAnnotation(annotationClass));
        }
        return null;
    }

    /**
     * Helper method to return the annotation class with specified name
     *
     * @param annotationClassName the fully qualified name of the class
     * @return the annotation class or <code>null</code>
     */
    private Class<? extends Annotation> getAnnotationClass(String annotationClassName) {
        if (annotationClassName == null) {
            return null;
        }
        try {
            Class<?> annotationClass = Class.forName(annotationClassName);
            if (annotationClass.isAnnotation()) {
                return (Class<? extends Annotation>) annotationClass;
            } else {
                Log.e(LOG_TAG, String.format("Provided annotation value %s is not an Annotation",
                        annotationClassName));
            }
        } catch (ClassNotFoundException e) {
            Log.e(LOG_TAG, String.format("Could not find class for specified annotation %s",
                    annotationClassName));
        }
        return null;
    }

    /**
     * Initialize the current thread as a looper.
     * <p/>
     * Exposed for unit testing.
     */
    void prepareLooper() {
        Looper.prepare();
    }

    @Override
    public void onStart() {
        prepareLooper();

        if (mJustCount) {
            mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
            finish(Activity.RESULT_OK, mResults);
        } else {
            if (mDebug) {
                Debug.waitForDebugger();
            }

            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            PrintStream writer = new PrintStream(byteArrayOutputStream);
            try {
                StringResultPrinter resultPrinter = new StringResultPrinter(writer);

                mTestRunner.addTestListener(resultPrinter);

                long startTime = System.currentTimeMillis();
                mTestRunner.runTest();
                long runTime = System.currentTimeMillis() - startTime;

                resultPrinter.print(mTestRunner.getTestResult(), runTime);
            } catch (Throwable t) {
                // catch all exceptions so a more verbose error message can be output
                writer.println(String.format("Test run aborted due to unexpected exception: %s",
                                t.getMessage()));
                t.printStackTrace(writer);
            } finally {
                mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\nTest results for %s=%s",
                        mTestRunner.getTestClassName(),
                        byteArrayOutputStream.toString()));

                if (mCoverage) {
                    generateCoverageReport();
                }
                writer.close();

                finish(Activity.RESULT_OK, mResults);
            }
        }
    }

    public TestSuite getTestSuite() {
        return getAllTests();
    }

    /**
     * Override this to define all of the tests to run in your package.
     */
    public TestSuite getAllTests() {
        return null;
    }

    /**
     * Override this to provide access to the class loader of your package.
     */
    public ClassLoader getLoader() {
        return null;
    }

    private void generateCoverageReport() {
        // use reflection to call emma dump coverage method, to avoid
        // always statically compiling against emma jar
        String coverageFilePath = getCoverageFilePath();
        java.io.File coverageFile = new java.io.File(coverageFilePath);
        try {
            Class<?> emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
            Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
                    coverageFile.getClass(), boolean.class, boolean.class);

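            // The reflective call below is equivalent to invoking
            // com.vladium.emma.rt.RT.dumpCoverageData(coverageFile, false, false) directly,
            // without requiring the emma jar on the compile-time classpath.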
            dumpCoverageMethod.invoke(null, coverageFile, false, false);
            // output path to generated coverage file so it can be parsed by a test harness if
            // needed
            mResults.putString(REPORT_KEY_COVERAGE_PATH, coverageFilePath);
            // also output a more user-friendly message
            final String currentStream = mResults.getString(
                    Instrumentation.REPORT_KEY_STREAMRESULT);
            mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("%s\nGenerated code coverage data to %s", currentStream,
                coverageFilePath));
        } catch (ClassNotFoundException e) {
            reportEmmaError("Is emma jar on classpath?", e);
        } catch (SecurityException e) {
            reportEmmaError(e);
        } catch (NoSuchMethodException e) {
            reportEmmaError(e);
        } catch (IllegalArgumentException e) {
            reportEmmaError(e);
        } catch (IllegalAccessException e) {
            reportEmmaError(e);
        } catch (InvocationTargetException e) {
            reportEmmaError(e);
        }
    }

    private String getCoverageFilePath() {
        if (mCoverageFilePath == null) {
            return getTargetContext().getFilesDir().getAbsolutePath() + File.separator +
                   DEFAULT_COVERAGE_FILE_NAME;
        } else {
            return mCoverageFilePath;
        }
    }

    private void reportEmmaError(Exception e) {
        reportEmmaError("", e);
    }

    private void reportEmmaError(String hint, Exception e) {
        String msg = "Failed to generate emma coverage. " + hint;
        Log.e(LOG_TAG, msg, e);
        mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
    }

    // TODO kill this, use status() and prettyprint model for better output
    private class StringResultPrinter extends ResultPrinter {

        public StringResultPrinter(PrintStream writer) {
            super(writer);
        }

        synchronized void print(TestResult result, long runTime) {
            printHeader(runTime);
            printFooter(result);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher about
     * the suite to which each test belongs.
     */
    private class SuiteAssignmentPrinter implements TestListener {

        private Bundle mTestResult;
        private long mStartTime;
        private long mEndTime;
        private boolean mTimingValid;

        public SuiteAssignmentPrinter() {
        }

        /**
         * Send a status for the start of each test, so long tests can be seen as "running".
         */
        public void startTest(Test test) {
            mTimingValid = true;
            mStartTime = System.currentTimeMillis();
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            float runTime;
            String assignmentSuite;
            mEndTime = System.currentTimeMillis();
            mTestResult = new Bundle();

            if (!mTimingValid || mStartTime < 0) {
                assignmentSuite = "NA";
                runTime = -1;
            } else {
                runTime = mEndTime - mStartTime;
                if (runTime < SMALL_SUITE_MAX_RUNTIME
                        && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
                    assignmentSuite = SMALL_SUITE;
                } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
                    assignmentSuite = MEDIUM_SUITE;
                } else {
                    assignmentSuite = LARGE_SUITE;
                }
            }
            // Clear mStartTime so that we can verify that it gets set next time.
            mStartTime = -1;

            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    test.getClass().getName() + "#" + ((TestCase) test).getName()
                    + "\nin " + assignmentSuite + " suite\nrunTime: "
                    + String.valueOf(runTime) + "\n");
            mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
            mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);

            sendStatus(0, mTestResult);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher
     */
    private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
        private final Bundle mResultTemplate;
        Bundle mTestResult;
        int mTestNum = 0;
        int mTestResultCode = 0;
        String mTestClass = null;
        PerformanceCollector mPerfCollector = new PerformanceCollector();
        boolean mIsTimedTest = false;
        boolean mIncludeDetailedStats = false;

        public WatcherResultPrinter(int numTests) {
            mResultTemplate = new Bundle();
            mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
        }

        /**
         * Send a status for the start of each test, so long tests can be seen
         * as "running".
         */
        public void startTest(Test test) {
            String testClass = test.getClass().getName();
            String testName = ((TestCase)test).getName();
            mTestResult = new Bundle(mResultTemplate);
            mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
            mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
            mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
            // pretty printing
            if (testClass != null && !testClass.equals(mTestClass)) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\n%s:", testClass));
                mTestClass = testClass;
            } else {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
            }

            Method testMethod = null;
            try {
                testMethod = test.getClass().getMethod(testName);
                // Report total number of iterations, if test is repetitive
                if (testMethod.isAnnotationPresent(RepetitiveTest.class)) {
                    int numIterations = testMethod.getAnnotation(
                        RepetitiveTest.class).numIterations();
                    mTestResult.putInt(REPORT_KEY_NUM_ITERATIONS, numIterations);
                }
            } catch (NoSuchMethodException e) {
                // ignore - the test with the given name does not exist; it will be handled
                // during test execution
            }

            // The delay_msec parameter is normally used to provide buffers of idle time
            // for power measurement purposes. To make sure there is a delay before and after
            // every test in a suite, we delay *after* every test (see endTest below) and also
            // delay *before* the first test. So, delay test1 delay test2 delay.

            try {
                if (mTestNum == 1) Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }

            sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
            mTestResultCode = 0;

            mIsTimedTest = false;
            mIncludeDetailedStats = false;
            try {
                // Look for TimedTest annotation on both test class and test method
                if (testMethod != null && testMethod.isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = testMethod.getAnnotation(
                            TimedTest.class).includeDetailedStats();
                } else if (test.getClass().isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = test.getClass().getAnnotation(
                            TimedTest.class).includeDetailedStats();
                }
            } catch (SecurityException e) {
                // ignore - the test with the given name cannot be accessed; it will be handled
                // during test execution
            }

            if (mIsTimedTest && mIncludeDetailedStats) {
                mPerfCollector.beginSnapshot("");
            } else if (mIsTimedTest) {
                mPerfCollector.startTiming("");
            }
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_ERROR;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("\nError in %s:\n%s",
                    ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("\nFailure in %s:\n%s",
                    ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            if (mIsTimedTest && mIncludeDetailedStats) {
                mTestResult.putAll(mPerfCollector.endSnapshot());
            } else if (mIsTimedTest) {
                writeStopTiming(mPerfCollector.stopTiming(""));
            }

            if (mTestResultCode == 0) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
            }
            sendStatus(mTestResultCode, mTestResult);

            try { // Sleep after every test, if specified
                Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }
        }

        public void writeBeginSnapshot(String label) {
            // Do nothing
        }

        public void writeEndSnapshot(Bundle results) {
            // Copy all snapshot data fields into mResults, which is output
            // via Instrumentation.finish
            mResults.putAll(results);
        }

        public void writeStartTiming(String label) {
            // Do nothing
        }

        public void writeStopTiming(Bundle results) {
            // Copy results into mTestResult by flattening the list of iterations,
            // which is output via WatcherResultPrinter.endTest
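            // Each iteration is flattened into keys of the form "iteration<N>." + metric key
            // (label, cpu time, execution time), so a single Bundle can carry every iteration.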
            int i = 0;
            for (Parcelable p :
                    results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
                Bundle iteration = (Bundle)p;
                String index = "iteration" + i + ".";
                mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
                        iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
                i++;
            }
        }

        public void writeMeasurement(String label, long value) {
            mTestResult.putLong(label, value);
        }

        public void writeMeasurement(String label, float value) {
            mTestResult.putFloat(label, value);
        }

        public void writeMeasurement(String label, String value) {
            mTestResult.putString(label, value);
        }

        // TODO report the end of the cycle
    }
}