Searched refs:results (Results 151 - 175 of 1458) sorted by relevance


/external/chromium_org/tools/chrome_proxy/integration_tests/
chrome_proxy_measurements.py
24 def ValidateAndMeasurePage(self, page, tab, results):
27 loading.LoadingMetric().AddResults(tab, results)
40 def ValidateAndMeasurePage(self, page, tab, results):
44 self._metrics.AddResultsForDataSaving(tab, results)
67 def ValidateAndMeasurePage(self, page, tab, results):
73 self.AddResults(tab, results)
75 def AddResults(self, tab, results):
101 def AddResults(self, tab, results):
102 self._metrics.AddResultsForHeaderValidation(tab, results)
111 def AddResults(self, tab, results)
[all...]
chrome_proxy_metrics_unittest.py
124 results = test_page_test_results.TestPageTestResults(self)
126 metric.AddResultsForDataSaving(None, results)
127 results.AssertHasPageSpecificScalarValue('resources_via_proxy', 'count', 2)
128 results.AssertHasPageSpecificScalarValue('resources_from_cache', 'count', 1)
129 results.AssertHasPageSpecificScalarValue('resources_direct', 'count', 2)
139 results = test_page_test_results.TestPageTestResults(self)
143 metric.AddResultsForHeaderValidation(None, results)
153 metric.AddResultsForHeaderValidation(None, results)
154 results.AssertHasPageSpecificScalarValue('checked_via_header', 'count', 2)
163 results
[all...]
/external/chromium_org/tools/perf/metrics/
timeline_unittest.py
18 results = test_page_test_results.TestPageTestResults(self)
19 metric.AddResults(model, renderer_thread, interaction_records, results)
20 return results
34 results = self.GetResults(
37 results.AssertHasPageSpecificScalarValue(
39 results.AssertHasPageSpecificScalarValue(
41 results.AssertHasPageSpecificScalarValue(
59 results = self.GetResults(
62 results.AssertHasPageSpecificScalarValue(
64 results
[all...]
timeline.py
15 def AddResults(self, model, renderer_thread, interaction_records, results):
48 results.AddValue(scalar.ScalarValue(
49 results.current_page, full_name, 'ms', total))
50 results.AddValue(scalar.ScalarValue(
51 results.current_page, full_name + '_max', 'ms', biggest_jank))
52 results.AddValue(scalar.ScalarValue(
53 results.current_page, full_name + '_avg', 'ms', total / len(times)))
61 results.AddValue(scalar.ScalarValue(
62 results.current_page, sanitized_counter_name, 'count', total))
63 results
[all...]
media.py
37 def AddResults(self, tab, results):
38 """Reports all recorded metrics as Telemetry perf results."""
41 trace_names.append(self._AddResultsForMediaElement(media_metric, results))
45 def _AddResultsForMediaElement(self, media_metric, results):
65 results.AddValue(list_of_scalar_values.ListOfScalarValues(
66 results.current_page, trace_name, unit,
70 results.AddValue(scalar.ScalarValue(
71 results.current_page, trace_name, unit, value=float(metrics[m]),
76 logging.error('Metrics ID is missing in results.')
startup_metric.py
45 def _RecordTabLoadTimes(self, tab, browser_main_entry_time_ms, results):
88 results.AddValue(scalar.ScalarValue(
89 results.current_page, 'foreground_tab_load_complete', 'ms',
92 def AddResults(self, tab, results):
108 results.AddValue(scalar.ScalarValue(
109 results.current_page, display_name, 'ms', measured_time))
116 self._RecordTabLoadTimes(tab, browser_main_entry_time_ms, results)
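
The Telemetry metrics hit above (timeline.py, media.py, startup_metric.py) all follow the same pattern: an AddResults method wraps each measurement in a value object tied to results.current_page and hands it to the shared results collector. A minimal sketch of that pattern, reconstructed from the snippets above; the import path and the ExampleLoadMetric class with its sample data are assumptions for illustration, not code from the tree:

# Sketch only: mirrors the AddResults pattern visible in the timeline.py snippet.
# The telemetry.value import path and this metric class are assumed, not verified.
from telemetry.value import scalar


class ExampleLoadMetric(object):
  def __init__(self):
    # In a real metric these samples would be gathered during Start()/Stop().
    self._load_times_ms = [12.0, 45.0, 30.0]

  def AddResults(self, tab, results):
    # Each value is attached to the page currently being measured.
    total = sum(self._load_times_ms)
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'example_load', 'ms', total))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'example_load_max', 'ms',
        max(self._load_times_ms)))
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'example_load_avg', 'ms',
        total / len(self._load_times_ms)))
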
/external/chromium_org/chrome/common/extensions/docs/examples/api/devtools/audits/broken-links/
background.js
6 var results = [];
18 results.push({
27 callback({ total: links.length, badlinks: results });
45 chrome.tabs.sendRequest(tabId, {}, function(results) {
46 validateLinks(results, callback);
/external/junit/src/org/junit/internal/runners/
TestClass.java
42 List<Method> results= new ArrayList<Method>();
47 if (annotation != null && ! isShadowed(eachMethod, results))
48 results.add(eachMethod);
52 Collections.reverse(results);
53 return results;
60 private boolean isShadowed(Method method, List<Method> results) { argument
61 for (Method each : results) {
81 ArrayList<Class<?>> results= new ArrayList<Class<?>>();
84 results.add(current);
87 return results;
[all...]
/external/srec/srec/Semproc/src/
SemanticResultImpl.c
50 impl->results = NULL;
52 rc = HashMapCreate(&impl->results);
67 CHKLOG(rc, impl->results->getSize(impl->results, count));
80 CHKLOG(rc, HashMapGetSize(impl->results, &size));
96 CHKLOG(rc, HashMapGetKeyAtIndex(impl->results, i, &theKey));
110 CHKLOG(rc, impl->results->get(impl->results, key, (void **)&theValue));
128 CHKLOG(rc, HashMapRemoveAndFreeAll(impl->results));
129 CHKLOG(rc, HashMapDestroy(impl->results));
[all...]
/external/chromium_org/chrome/browser/diagnostics/
diagnostics_controller_unittest.cc
88 const DiagnosticsModel& results = local
90 EXPECT_EQ(results.GetTestRunCount(), results.GetTestAvailableCount());
91 EXPECT_EQ(DiagnosticsModel::kDiagnosticsTestCount, results.GetTestRunCount());
92 for (int i = 0; i < results.GetTestRunCount(); ++i) {
93 const DiagnosticsModel::TestInfo& info(results.GetTest(i));
103 const DiagnosticsModel& results = local
105 EXPECT_EQ(results.GetTestRunCount(), results.GetTestAvailableCount());
106 EXPECT_EQ(DiagnosticsModel::kDiagnosticsTestCount, results
121 const DiagnosticsModel& results = local
143 const DiagnosticsModel& results = local
[all...]
/external/chromium_org/ppapi/
PRESUBMIT.py
12 results = []
19 results.append(output_api.PresubmitPromptWarning(
22 results.append(
25 return results
30 results = []
40 results.extend(RunCmdAndCheck(cmd,
44 return results
189 results = []
191 results.extend(RunUnittests(input_api, output_api))
193 results
[all...]
/external/chromium_org/third_party/skia/
PRESUBMIT.py
79 results = []
86 results.extend(
89 results.extend(_PythonChecks(input_api, output_api))
90 return results
99 results = []
100 results.extend(_CommonChecks(input_api, output_api))
101 return results
143 results = []
163 results.append(
181 return results
[all...]
/external/skia/gm/rebaseline_server/
compare_rendered_pictures.py
9 Compare results of two render_pictures runs.
25 import results namespace
33 class RenderedPicturesComparisons(results.BaseComparisons):
34 """Loads results from two different render_pictures runs into an ImagePairSet.
38 generated_images_root=results.DEFAULT_GENERATED_IMAGES_ROOT,
79 'Reading actual-results JSON files from %s subdirs within %s...' % (
96 column_id=results.KEY__EXTRACOLUMNS__RESULT_TYPE, values=[
97 results.KEY__RESULT_TYPE__FAILED,
98 results.KEY__RESULT_TYPE__NOCOMPARISON,
99 results
[all...]
/external/chromium_org/components/policy/resources/
PRESUBMIT.py
69 results = []
71 results.append(output_api.PresubmitError(error_missing % policy))
73 results.append(output_api.PresubmitError(error_extra % policy))
74 return results
94 results = []
97 results.append(output_api.PresubmitError(error_missing % policy['name']))
98 return results
102 results = []
103 results.extend(_CheckPolicyTemplatesSyntax(input_api, output_api))
113 results
[all...]
/external/chromium_org/chrome/browser/
PRESUBMIT.py
26 results = []
43 results.extend(
64 results.extend(resource_checker.ResourceChecker(
66 results.extend(css_checker.CSSChecker(
68 results.extend(html_checker.HtmlChecker(
70 results.extend(js_checker.JSChecker(
75 return results
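
The PRESUBMIT.py hits above (ppapi, skia, policy resources, chrome/browser) share Chromium's standard presubmit shape: each check builds a list of output_api messages named results, and the top-level hooks extend and return that list. A minimal sketch of that shape, assuming the standard CheckChangeOnUpload entry point; the _CheckNoTabs check itself is hypothetical, used only to illustrate the pattern:

# Sketch of the presubmit pattern seen in the PRESUBMIT.py results above.
# _CheckNoTabs is a made-up check for illustration; the input_api/output_api
# calls used here are the standard Chromium presubmit API.
def _CheckNoTabs(input_api, output_api):
  results = []
  for f in input_api.AffectedSourceFiles(None):
    for line_num, line in f.ChangedContents():
      if '\t' in line:
        results.append(output_api.PresubmitPromptWarning(
            '%s:%d contains a tab character.' % (f.LocalPath(), line_num)))
  return results


def CheckChangeOnUpload(input_api, output_api):
  results = []
  results.extend(_CheckNoTabs(input_api, output_api))
  return results
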
/external/chromium_org/tools/gn/
command_refs.cc
47 void OutputResultSet(const TargetSet& results, bool as_files) { argument
48 if (results.empty())
54 for (TargetSet::const_iterator iter = results.begin();
55 iter != results.end(); ++iter)
66 for (TargetSet::const_iterator iter = results.begin();
67 iter != results.end(); ++iter)
72 (*results.begin())->settings()->default_toolchain_label();
127 TargetSet* results);
133 TargetSet* results) {
134 if (results
131 RecursiveCollectRefs(const DepMap& dep_map, const Target* target, TargetSet* results) argument
141 RecursiveCollectChildRefs(const DepMap& dep_map, const Target* target, TargetSet* results) argument
264 TargetSet results; local
270 TargetSet results; local
[all...]
/external/eclipse-basebuilder/basebuilder-3.6.2/org.eclipse.releng.basebuilder/plugins/org.eclipse.test.performance.ui/src/org/eclipse/test/internal/performance/results/model/
ComponentResultsElement.java
11 package org.eclipse.test.internal.performance.results.model;
21 import org.eclipse.test.internal.performance.results.db.AbstractResults;
22 import org.eclipse.test.internal.performance.results.db.ComponentResults;
23 import org.eclipse.test.internal.performance.results.db.PerformanceResults;
24 import org.eclipse.test.internal.performance.results.db.ScenarioResults;
25 import org.eclipse.test.internal.performance.results.utils.IPerformancesConstants;
26 import org.eclipse.test.internal.performance.results.utils.Util;
74 public ComponentResultsElement(AbstractResults results, ResultsElement parent) { argument
75 super(results, parent);
81 * @see org.eclipse.test.internal.performance.results
[all...]
/external/chromium_org/base/test/
gtest_xml_util.cc
91 std::vector<TestResult>* results,
93 DCHECK(results);
157 results->push_back(result);
187 if (!results->empty() &&
188 results->at(results->size() - 1).full_name == result.full_name &&
189 results->at(results->size() - 1).status ==
193 results->pop_back();
196 results
90 ProcessGTestOutput(const base::FilePath& output_file, std::vector<TestResult>* results, bool* crashed) argument
[all...]
/external/chromium-trace/trace-viewer/src/tracing/analysis/
analyze_slices_test.js
107 var results = new StubAnalysisResults();
108 tracing.analysis.analyzeSelection(results, selection);
109 assertEquals(1, results.tables.length);
110 var table = results.tables[0];
122 var results = new StubAnalysisResults();
123 tracing.analysis.analyzeSelection(results, selection);
124 assertEquals(1, results.tables.length);
125 var table = results.tables[0];
138 var results = new StubAnalysisResults();
139 tracing.analysis.analyzeSelection(results, selectio
[all...]
/external/chromium_org/net/dns/
mdns_cache_unittest.cc
153 std::vector<const RecordParsed*> results; local
162 cache_.FindDnsRecords(ARecordRdata::kType, "ghs.l.google.com", &results,
165 EXPECT_EQ(1u, results.size());
166 EXPECT_EQ(default_time_, results.front()->time_created());
168 EXPECT_EQ("ghs.l.google.com", results.front()->name());
170 results.clear();
171 cache_.FindDnsRecords(PtrRecordRdata::kType, "ghs.l.google.com", &results,
174 EXPECT_EQ(0u, results.size());
185 std::vector<const RecordParsed*> results; local
198 cache_.FindDnsRecords(ARecordRdata::kType, "ghs.l.google.com", &results,
235 std::vector<const RecordParsed*> results; local
254 std::vector<const RecordParsed*> results; local
273 std::vector<const RecordParsed*> results; local
298 std::vector<const RecordParsed*> results; local
328 std::vector<const RecordParsed*> results; local
354 std::vector<const RecordParsed*> results; local
[all...]
/external/chromium_org/base/debug/
crash_logging_unittest.cc
144 std::vector<std::string> results = local
146 ASSERT_EQ(1u, results.size());
147 EXPECT_EQ("hello worl", results[0]);
150 results = ChunkCrashKeyValue(key, "hi", 10);
151 ASSERT_EQ(1u, results.size());
152 EXPECT_EQ("hi", results[0]);
156 results = ChunkCrashKeyValue(key, "foobar", 3);
157 ASSERT_EQ(2u, results.size());
158 EXPECT_EQ("foo", results[0]);
159 EXPECT_EQ("bar", results[
[all...]
/external/chromium_org/chrome/renderer/spellchecker/
spellcheck_provider_test.cc
24 const blink::WebVector<blink::WebTextCheckingResult>& results) {
78 std::vector<blink::WebTextCheckingResult> results;
79 results.push_back(blink::WebTextCheckingResult(
82 completion->didFinishCheckingText(results);
84 last_results_ = results;
23 didFinishCheckingText( const blink::WebVector<blink::WebTextCheckingResult>& results) argument
/external/chromium_org/third_party/WebKit/Source/core/xml/
XSLTUnicodeSort.cpp
54 xmlXPathObjectPtr* results = 0; local
112 results = resultsTab[0];
117 if (!results)
132 if (!results[i])
137 if (!results[j]) {
143 if (xmlXPathIsNaN(results[j]->floatval)) {
144 if (xmlXPathIsNaN(results[j + incr]->floatval))
148 } else if (xmlXPathIsNaN(results[j + incr]->floatval)) {
150 } else if (results[j]->floatval == results[
[all...]
/external/chromium_org/tools/perf/benchmarks/
canvasmark.py
32 def ValidateAndMeasurePage(self, _, tab, results):
37 # Split the results into score and test name.
38 # results log e.g., "489 [Test 1 - Asteroids - Bitmaps]"
44 results.AddValue(scalar.ScalarValue(
45 results.current_page, name, 'score', score, important=False))
48 results.AddValue(scalar.ScalarValue(
49 results.current_page, 'Score', 'score', total))
maps.py
20 def ValidateAndMeasurePage(self, page, tab, results):
26 results.AddValue(scalar.ScalarValue(
27 results.current_page, 'total_time', 'ms', total))
28 results.AddValue(scalar.ScalarValue(
29 results.current_page, 'render_mean_time', 'ms', render))
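
The benchmark hits above (canvasmark.py, maps.py) show the page_test side of the same API: ValidateAndMeasurePage pulls numbers out of the loaded page and reports them through the shared results object. A minimal sketch under the same assumptions about the telemetry import paths; the JavaScript hook and value names are invented for illustration:

# Sketch of the ValidateAndMeasurePage pattern from the benchmark results above.
# window.__exampleScore is a hypothetical page hook; import paths are assumed.
from telemetry.page import page_test
from telemetry.value import scalar


class ExampleMeasurement(page_test.PageTest):
  def ValidateAndMeasurePage(self, page, tab, results):
    # Read a score the page computed, then report it for the current page.
    score = tab.EvaluateJavaScript('window.__exampleScore')
    results.AddValue(scalar.ScalarValue(
        results.current_page, 'Score', 'score', score))
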

