InstrumentationTestRunner.java revision cbc584d7ebe332f78bf8012d21265ccb482c7cfc
/*
 * Copyright (C) 2007 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.test;

import com.android.internal.util.Predicate;
import com.android.internal.util.Predicates;

import android.app.Activity;
import android.app.Instrumentation;
import android.os.Bundle;
import android.os.Debug;
import android.os.Looper;
import android.os.Parcelable;
import android.os.PerformanceCollector;
import android.os.PerformanceCollector.PerformanceResultsWriter;
import android.test.suitebuilder.TestMethod;
import android.test.suitebuilder.TestPredicates;
import android.test.suitebuilder.TestSuiteBuilder;
import android.test.suitebuilder.annotation.HasAnnotation;
import android.test.suitebuilder.annotation.LargeTest;
import android.util.Log;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

import junit.framework.AssertionFailedError;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestListener;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import junit.runner.BaseTestRunner;
import junit.textui.ResultPrinter;

/**
 * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
 * an Android package (application). Typical usage:
 * <ol>
 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
 * against the classes in your package.  Typically these are subclassed from:
 *   <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li>
 *   <li>{@link android.test.ActivityUnitTestCase}</li>
 *   <li>{@link android.test.AndroidTestCase}</li>
 *   <li>{@link android.test.ApplicationTestCase}</li>
 *   <li>{@link android.test.InstrumentationTestCase}</li>
 *   <li>{@link android.test.ProviderTestCase}</li>
 *   <li>{@link android.test.ServiceTestCase}</li>
 *   <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul>
 * <li>In an appropriate AndroidManifest.xml, define this instrumentation with
 * the appropriate android:targetPackage set.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with no optional arguments, to run all tests (except performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e func true' to run all functional tests. These are tests that derive from
 * {@link android.test.InstrumentationTestCase}.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive
 * from {@link android.test.InstrumentationTestCase} (and are not performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}.
 * </ol>
 * <p/>
 * <b>Running all tests:</b> adb shell am instrument -w
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all small tests:</b> adb shell am instrument -w
 * -e size small
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all medium tests:</b> adb shell am instrument -w
 * -e size medium
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all large tests:</b> adb shell am instrument -w
 * -e size large
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Filter test run to tests with given annotation:</b> adb shell am instrument -w
 * -e annotation com.android.foo.MyAnnotation
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * If used with other options, the resulting test run will contain the intersection of the two
 * options, e.g. "-e size large -e annotation com.android.foo.MyAnnotation" will run only tests
 * with both the {@link LargeTest} and "com.android.foo.MyAnnotation" annotations.
 * <p/>
 * <b>Filter test run to tests <i>without</i> given annotation:</b> adb shell am instrument -w
 * -e notAnnotation com.android.foo.MyAnnotation
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single testcase:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running a single test:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest#testFoo
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running multiple tests:</b> adb shell am instrument -w
 * -e class com.android.foo.FooTest,com.android.foo.TooTest
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Running all tests in a java package:</b> adb shell am instrument -w
 * -e package com.android.foo.subpkg
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>Including performance tests:</b> adb shell am instrument -w
 * -e perf true
 * com.android.foo/android.test.InstrumentationTestRunner
 * <p/>
 * <b>To debug your tests, set a break point in your code and pass:</b>
 * -e debug true
 * <p/>
 * <b>To run in 'log only' mode:</b>
 * -e log true
 * This option will load and iterate through all test classes and methods, but will bypass actual
 * test execution. Useful for quickly obtaining info on the tests to be executed by an
 * instrumentation command.
 * <p/>
 * <b>To generate EMMA code coverage:</b>
 * -e coverage true
 * Note: this requires an emma instrumented build. By default, the code coverage results file
 * will be saved as coverage.ec in the target application's files directory (typically
 * /data/data/<app package>/files/), unless overridden by the coverageFile flag (see below).
 * <p/>
 * <b>To specify the EMMA code coverage results file path:</b>
 * -e coverageFile /sdcard/myFile.ec
 * <br/>
 * in addition to the other arguments.
 */

/* (not JavaDoc)
 * Although not necessary in most cases, another way to use this class is to extend it and have the
 * derived class return the desired test suite from the {@link #getTestSuite()} method. The test
 * suite returned from this method will be used if no target class is defined in the meta-data or
 * command line argument parameters. If a derived class is used it needs to be added as an
 * instrumentation to the AndroidManifest.xml and the command to run it would look like:
 * <p/>
 * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
 * <p/>
 * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class
 * (a sketch of such a subclass follows this comment).
 *
 * This model is used by many existing app tests, but can probably be deprecated.
 */
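
/* Illustrative sketch only (not part of the platform sources): a hypothetical derived runner that
 * supplies its own suite when no class or package argument is given. The class and test names
 * below are made up for illustration.
 *
 *   public class FooInstrumentationTestRunner extends InstrumentationTestRunner {
 *       @Override
 *       public TestSuite getTestSuite() {
 *           TestSuite suite = new TestSuite("FooTests");
 *           suite.addTestSuite(FooTest.class);  // hypothetical TestCase in the target package
 *           return suite;
 *       }
 *   }
 *
 * Declared as an <instrumentation> in AndroidManifest.xml, it would be invoked with:
 *   adb shell am instrument -w com.android.foo/com.android.FooInstrumentationTestRunner
 */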
public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider {

    /** @hide */
    public static final String ARGUMENT_TEST_CLASS = "class";
    /** @hide */
    public static final String ARGUMENT_TEST_PACKAGE = "package";
    /** @hide */
    public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size";
    /** @hide */
    public static final String ARGUMENT_DELAY_MSEC = "delay_msec";

    private static final String SMALL_SUITE = "small";
    private static final String MEDIUM_SUITE = "medium";
    private static final String LARGE_SUITE = "large";

    private static final String ARGUMENT_LOG_ONLY = "log";
    /** @hide */
    static final String ARGUMENT_ANNOTATION = "annotation";
    /** @hide */
    static final String ARGUMENT_NOT_ANNOTATION = "notAnnotation";

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the "small"
     * suite. It is used to make an educated guess at which suite an unlabeled test belongs to.
     */
    private static final float SMALL_SUITE_MAX_RUNTIME = 100;

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the
     * "medium" suite. It is used to make an educated guess at which suite an unlabeled test
     * belongs to.
     */
    private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;

    /**
     * The following keys are used in the status bundle to provide structured reports to
     * an IInstrumentationWatcher.
     */

    /**
     * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
     * identifies InstrumentationTestRunner as the source of the report.  This is sent with all
     * status messages.
     */
    public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the total number of tests that are being run.  This is sent with all status
     * messages.
     */
    public static final String REPORT_KEY_NUM_TOTAL = "numtests";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the sequence number of the current test.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NUM_CURRENT = "current";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test class.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_CLASS = "class";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the name of the current test.  This is sent with any status message
     * describing a specific test being started or completed.
     */
    public static final String REPORT_KEY_NAME_TEST = "test";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the run time in milliseconds of the current test.
     */
    private static final String REPORT_KEY_RUN_TIME = "runtime";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the number of total iterations of the current test.
     */
    private static final String REPORT_KEY_NUM_ITERATIONS = "numiterations";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the guessed suite assignment for the current test.
     */
    private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * identifies the path to the generated code coverage file.
     */
    private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath";

    /**
     * The test is starting.
     */
    public static final int REPORT_VALUE_RESULT_START = 1;
    /**
     * The test completed successfully.
     */
    public static final int REPORT_VALUE_RESULT_OK = 0;
    /**
     * The test completed with an error.
     */
    public static final int REPORT_VALUE_RESULT_ERROR = -1;
    /**
     * The test completed with a failure.
     */
    public static final int REPORT_VALUE_RESULT_FAILURE = -2;
    /**
     * If included in the status bundle sent to an IInstrumentationWatcher, this key
     * identifies a stack trace describing an error or failure.  This is sent with any status
     * message describing a specific test being completed.
     */
    public static final String REPORT_KEY_STACK = "stack";

    // Default file name for code coverage
    private static final String DEFAULT_COVERAGE_FILE_NAME = "coverage.ec";

    private static final String LOG_TAG = "InstrumentationTestRunner";

    private final Bundle mResults = new Bundle();
    private Bundle mArguments;
    private AndroidTestRunner mTestRunner;
    private boolean mDebug;
    private boolean mJustCount;
    private boolean mSuiteAssignmentMode;
    private int mTestCount;
    private String mPackageOfTests;
    private boolean mCoverage;
    private String mCoverageFilePath;
    private int mDelayMsec;

    @Override
    public void onCreate(Bundle arguments) {
        super.onCreate(arguments);
        mArguments = arguments;

        // Apk paths used to search for test classes when using TestSuiteBuilders.
        String[] apkPaths =
                {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()};
        ClassPathPackageInfoSource.setApkPaths(apkPaths);

        Predicate<TestMethod> testSizePredicate = null;
        Predicate<TestMethod> testAnnotationPredicate = null;
        Predicate<TestMethod> testNotAnnotationPredicate = null;
        String testClassesArg = null;
        boolean logOnly = false;

        if (arguments != null) {
            // Test class name passed as an argument should override any meta-data declaration.
            testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS);
            mDebug = getBooleanArgument(arguments, "debug");
            mJustCount = getBooleanArgument(arguments, "count");
            mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment");
            mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE);
            testSizePredicate = getSizePredicateFromArg(
                    arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE));
            testAnnotationPredicate = getAnnotationPredicate(
                    arguments.getString(ARGUMENT_ANNOTATION));
            testNotAnnotationPredicate = getNotAnnotationPredicate(
                    arguments.getString(ARGUMENT_NOT_ANNOTATION));

            logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY);
            mCoverage = getBooleanArgument(arguments, "coverage");
            mCoverageFilePath = arguments.getString("coverageFile");

            try {
                Object delay = arguments.get(ARGUMENT_DELAY_MSEC);  // Accept either string or int
                if (delay != null) mDelayMsec = Integer.parseInt(delay.toString());
            } catch (NumberFormatException e) {
                Log.e(LOG_TAG, "Invalid delay_msec parameter", e);
            }
        }

        TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(),
                getTargetContext().getClassLoader());

        if (testSizePredicate != null) {
            testSuiteBuilder.addRequirements(testSizePredicate);
        }
        if (testAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testAnnotationPredicate);
        }
        if (testNotAnnotationPredicate != null) {
            testSuiteBuilder.addRequirements(testNotAnnotationPredicate);
        }

        if (testClassesArg == null) {
            if (mPackageOfTests != null) {
                testSuiteBuilder.includePackages(mPackageOfTests);
            } else {
                TestSuite testSuite = getTestSuite();
                if (testSuite != null) {
                    testSuiteBuilder.addTestSuite(testSuite);
                } else {
                    // No package or class bundle arguments were supplied, and no test suite was
                    // provided, so add all tests in the application.
                    testSuiteBuilder.includePackages("");
                }
            }
        } else {
            parseTestClasses(testClassesArg, testSuiteBuilder);
        }

        testSuiteBuilder.addRequirements(getBuilderRequirements());

        mTestRunner = getAndroidTestRunner();
        mTestRunner.setContext(getTargetContext());
        mTestRunner.setInstrumentation(this);
        mTestRunner.setSkipExecution(logOnly);
        mTestRunner.setTest(testSuiteBuilder.build());
        mTestCount = mTestRunner.getTestCases().size();
        if (mSuiteAssignmentMode) {
            mTestRunner.addTestListener(new SuiteAssignmentPrinter());
        } else {
            WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount);
            mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
            mTestRunner.addTestListener(resultPrinter);
            mTestRunner.setPerformanceResultsWriter(resultPrinter);
        }
        start();
    }

    /**
     * Get the Bundle object that contains the arguments
     *
     * @return the Bundle object
     * @hide
     */
    public Bundle getBundle() {
        return mArguments;
    }

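    /**
     * Returns any additional {@link Predicate} requirements that each test must satisfy before it
     * is added to the suite built in {@link #onCreate}. The default implementation returns an
     * empty list.
     */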
    List<Predicate<TestMethod>> getBuilderRequirements() {
        return new ArrayList<Predicate<TestMethod>>();
    }

    /**
     * Parses and loads the specified set of test classes
     *
     * @param testClassArg - comma-separated list of test classes and methods
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) {
        String[] testClasses = testClassArg.split(",");
        for (String testClass : testClasses) {
            parseTestClass(testClass, testSuiteBuilder);
        }
    }

    /**
     * Parse and load the given test class and, optionally, method
     *
     * @param testClassName - fully qualified name of the test class, optionally followed by the
     *        method to add. Expected format: com.android.TestClass#testMethod
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
        int methodSeparatorIndex = testClassName.indexOf('#');
        String testMethodName = null;

        if (methodSeparatorIndex > 0) {
            testMethodName = testClassName.substring(methodSeparatorIndex + 1);
            testClassName = testClassName.substring(0, methodSeparatorIndex);
        }
        testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext());
    }

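    /**
     * Returns the {@link AndroidTestRunner} used to execute the test suite; protected so that
     * subclasses can supply a customized runner.
     */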
    protected AndroidTestRunner getAndroidTestRunner() {
        return new AndroidTestRunner();
    }

    private boolean getBooleanArgument(Bundle arguments, String tag) {
        String tagString = arguments.getString(tag);
        return tagString != null && Boolean.parseBoolean(tagString);
    }

    /*
     * Returns the size predicate object, corresponding to the "size" argument value.
     */
    private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {
        if (SMALL_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_SMALL;
        } else if (MEDIUM_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_MEDIUM;
        } else if (LARGE_SUITE.equals(sizeArg)) {
            return TestPredicates.SELECT_LARGE;
        } else {
            return null;
        }
    }

    /**
     * Returns the test predicate object corresponding to the annotation class value provided via
     * the {@link #ARGUMENT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return new HasAnnotation(annotationClass);
        }
        return null;
    }

    /**
     * Returns the negative test predicate object corresponding to the annotation class value
     * provided via the {@link #ARGUMENT_NOT_ANNOTATION} argument.
     *
     * @return the predicate or <code>null</code>
     */
    private Predicate<TestMethod> getNotAnnotationPredicate(String annotationClassName) {
        Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName);
        if (annotationClass != null) {
            return Predicates.not(new HasAnnotation(annotationClass));
        }
        return null;
    }

    /**
     * Helper method to return the annotation class with the specified name
     *
     * @param annotationClassName the fully qualified name of the class
     * @return the annotation class or <code>null</code>
     */
    private Class<? extends Annotation> getAnnotationClass(String annotationClassName) {
        if (annotationClassName == null) {
            return null;
        }
        try {
            Class<?> annotationClass = Class.forName(annotationClassName);
            if (annotationClass.isAnnotation()) {
                return (Class<? extends Annotation>) annotationClass;
            } else {
                Log.e(LOG_TAG, String.format("Provided annotation value %s is not an Annotation",
                        annotationClassName));
            }
        } catch (ClassNotFoundException e) {
            Log.e(LOG_TAG, String.format("Could not find class for specified annotation %s",
                    annotationClassName));
        }
        return null;
    }

    /**
     * Initialize the current thread as a looper.
     * <p/>
     * Exposed for unit testing.
     */
    void prepareLooper() {
        Looper.prepare();
    }

    @Override
    public void onStart() {
        prepareLooper();

        if (mJustCount) {
            mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
            finish(Activity.RESULT_OK, mResults);
        } else {
            if (mDebug) {
                Debug.waitForDebugger();
            }

            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            PrintStream writer = new PrintStream(byteArrayOutputStream);
            try {
                StringResultPrinter resultPrinter = new StringResultPrinter(writer);

                mTestRunner.addTestListener(resultPrinter);

                long startTime = System.currentTimeMillis();
                mTestRunner.runTest();
                long runTime = System.currentTimeMillis() - startTime;

                resultPrinter.print(mTestRunner.getTestResult(), runTime);
            } catch (Throwable t) {
                // Catch all exceptions so a more verbose error message can be output.
                writer.println(String.format("Test run aborted due to unexpected exception: %s",
                                t.getMessage()));
                t.printStackTrace(writer);
            } finally {
                mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\nTest results for %s=%s",
                        mTestRunner.getTestClassName(),
                        byteArrayOutputStream.toString()));

                if (mCoverage) {
                    generateCoverageReport();
                }
                writer.close();

                finish(Activity.RESULT_OK, mResults);
            }
        }
    }

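    /**
     * Returns the test suite to run when neither a class nor a package argument is supplied;
     * delegates to {@link #getAllTests()}.
     */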
    public TestSuite getTestSuite() {
        return getAllTests();
    }

    /**
     * Override this to define all of the tests to run in your package.
     */
    public TestSuite getAllTests() {
        return null;
    }

    /**
     * Override this to provide access to the class loader of your package.
     */
    public ClassLoader getLoader() {
        return null;
    }

    private void generateCoverageReport() {
        // use reflection to call emma dump coverage method, to avoid
        // always statically compiling against emma jar
        String coverageFilePath = getCoverageFilePath();
        java.io.File coverageFile = new java.io.File(coverageFilePath);
        try {
            Class<?> emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
            Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
                    coverageFile.getClass(), boolean.class, boolean.class);

            dumpCoverageMethod.invoke(null, coverageFile, false, false);
            // output path to generated coverage file so it can be parsed by a test harness if
            // needed
            mResults.putString(REPORT_KEY_COVERAGE_PATH, coverageFilePath);
            // also output a more user friendly msg
            final String currentStream = mResults.getString(
                    Instrumentation.REPORT_KEY_STREAMRESULT);
            mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("%s\nGenerated code coverage data to %s", currentStream,
                coverageFilePath));
        } catch (ClassNotFoundException e) {
            reportEmmaError("Is emma jar on classpath?", e);
        } catch (SecurityException e) {
            reportEmmaError(e);
        } catch (NoSuchMethodException e) {
            reportEmmaError(e);
        } catch (IllegalArgumentException e) {
            reportEmmaError(e);
        } catch (IllegalAccessException e) {
            reportEmmaError(e);
        } catch (InvocationTargetException e) {
            reportEmmaError(e);
        }
    }

    private String getCoverageFilePath() {
        if (mCoverageFilePath == null) {
            return getTargetContext().getFilesDir().getAbsolutePath() + File.separator +
                   DEFAULT_COVERAGE_FILE_NAME;
        } else {
            return mCoverageFilePath;
        }
    }

    private void reportEmmaError(Exception e) {
        reportEmmaError("", e);
    }

    private void reportEmmaError(String hint, Exception e) {
        String msg = "Failed to generate emma coverage. " + hint;
        Log.e(LOG_TAG, msg, e);
        mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
    }

    // TODO kill this, use status() and prettyprint model for better output
    private class StringResultPrinter extends ResultPrinter {

        public StringResultPrinter(PrintStream writer) {
            super(writer);
        }

        synchronized void print(TestResult result, long runTime) {
            printHeader(runTime);
            printFooter(result);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher about
     * which suite each test belongs to.
     */
    private class SuiteAssignmentPrinter implements TestListener {

        private Bundle mTestResult;
        private long mStartTime;
        private long mEndTime;
        private boolean mTimingValid;

        public SuiteAssignmentPrinter() {
        }

        /**
         * Send a status for the start of each test, so long-running tests can be seen as "running".
         */
        public void startTest(Test test) {
            mTimingValid = true;
            mStartTime = System.currentTimeMillis();
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            float runTime;
            String assignmentSuite;
            mEndTime = System.currentTimeMillis();
            mTestResult = new Bundle();

            if (!mTimingValid || mStartTime < 0) {
                assignmentSuite = "NA";
                runTime = -1;
            } else {
                runTime = mEndTime - mStartTime;
                if (runTime < SMALL_SUITE_MAX_RUNTIME
                        && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
                    assignmentSuite = SMALL_SUITE;
                } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
                    assignmentSuite = MEDIUM_SUITE;
                } else {
                    assignmentSuite = LARGE_SUITE;
                }
            }
            // Clear mStartTime so that we can verify that it gets set next time.
            mStartTime = -1;

            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    test.getClass().getName() + "#" + ((TestCase) test).getName()
                    + "\nin " + assignmentSuite + " suite\nrunTime: "
                    + String.valueOf(runTime) + "\n");
            mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
            mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);

            sendStatus(0, mTestResult);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher.
     */
    private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
        private final Bundle mResultTemplate;
        Bundle mTestResult;
        int mTestNum = 0;
        int mTestResultCode = 0;
        String mTestClass = null;
        PerformanceCollector mPerfCollector = new PerformanceCollector();
        boolean mIsTimedTest = false;
        boolean mIncludeDetailedStats = false;

        public WatcherResultPrinter(int numTests) {
            mResultTemplate = new Bundle();
            mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
        }

        /**
         * Send a status for the start of each test, so long-running tests can be seen
         * as "running".
         */
        public void startTest(Test test) {
            String testClass = test.getClass().getName();
            String testName = ((TestCase)test).getName();
            mTestResult = new Bundle(mResultTemplate);
            mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
            mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
            mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
            // pretty printing
            if (testClass != null && !testClass.equals(mTestClass)) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\n%s:", testClass));
                mTestClass = testClass;
            } else {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
            }

            Method testMethod = null;
            try {
                testMethod = test.getClass().getMethod(testName);
                // Report total number of iterations, if test is repetitive
                if (testMethod.isAnnotationPresent(RepetitiveTest.class)) {
                    int numIterations = testMethod.getAnnotation(
                        RepetitiveTest.class).numIterations();
                    mTestResult.putInt(REPORT_KEY_NUM_ITERATIONS, numIterations);
                }
            } catch (NoSuchMethodException e) {
                // ignore - the test with given name does not exist. Will be handled during test
                // execution
            }

            // The delay_msec parameter is normally used to provide buffers of idle time
            // for power measurement purposes. To make sure there is a delay before and after
            // every test in a suite, we delay *after* every test (see endTest below) and also
            // delay *before* the first test. So, delay test1 delay test2 delay.

            try {
                if (mTestNum == 1) Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }

            sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
            mTestResultCode = 0;

            mIsTimedTest = false;
            mIncludeDetailedStats = false;
            try {
                // Look for TimedTest annotation on both test class and test method
                if (testMethod != null && testMethod.isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = testMethod.getAnnotation(
                            TimedTest.class).includeDetailedStats();
                } else if (test.getClass().isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = test.getClass().getAnnotation(
                            TimedTest.class).includeDetailedStats();
                }
            } catch (SecurityException e) {
                // ignore - the test with given name cannot be accessed. Will be handled during
                // test execution
            }

            if (mIsTimedTest && mIncludeDetailedStats) {
                mPerfCollector.beginSnapshot("");
            } else if (mIsTimedTest) {
                mPerfCollector.startTiming("");
            }
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_ERROR;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("\nError in %s:\n%s",
                    ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                String.format("\nFailure in %s:\n%s",
                    ((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            if (mIsTimedTest && mIncludeDetailedStats) {
                mTestResult.putAll(mPerfCollector.endSnapshot());
            } else if (mIsTimedTest) {
                writeStopTiming(mPerfCollector.stopTiming(""));
            }

            if (mTestResultCode == 0) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
            }
            sendStatus(mTestResultCode, mTestResult);

            try { // Sleep after every test, if specified
                Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }
        }

        public void writeBeginSnapshot(String label) {
            // Do nothing
        }

        public void writeEndSnapshot(Bundle results) {
            // Copy all snapshot data fields into mResults, which is reported
            // via Instrumentation.finish
            mResults.putAll(results);
        }

        public void writeStartTiming(String label) {
            // Do nothing
        }

        public void writeStopTiming(Bundle results) {
            // Copy results into mTestResult by flattening the list of iterations,
            // which is reported via WatcherResultPrinter.endTest.
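            // Each iteration's metrics end up under keys of the form
            // "iteration<i>." + the corresponding PerformanceCollector metric key.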
            int i = 0;
            for (Parcelable p :
                    results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
                Bundle iteration = (Bundle)p;
                String index = "iteration" + i + ".";
                mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
                        iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
                i++;
            }
        }

        public void writeMeasurement(String label, long value) {
            mTestResult.putLong(label, value);
        }

        public void writeMeasurement(String label, float value) {
            mTestResult.putFloat(label, value);
        }

        public void writeMeasurement(String label, String value) {
            mTestResult.putString(label, value);
        }

        // TODO report the end of the cycle
    }
}