# Copyright (c) 2009, Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# WebKit's Python module for interacting with WebKit's buildbot

import re
import urllib2

# Import WebKit-specific modules.
from webkitpy.webkit_logging import log

# WebKit bundles a copy of BeautifulSoup in Scripts/webkitpy,
# so this import should always succeed.
from .BeautifulSoup import BeautifulSoup


class BuildBot:
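    """Scrapes builder state from WebKit's buildbot web pages."""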

    default_host = "build.webkit.org"

    def __init__(self, host=default_host):
        self.buildbot_host = host
        self.buildbot_server_url = "http://%s/" % self.buildbot_host

        # If any Leopard builder/tester, Windows builder or Chromium builder is
        # red we should not be landing patches.  Other builders should be added
        # to this list once they are known to be reliable.
        # See https://bugs.webkit.org/show_bug.cgi?id=33296 and related bugs.
        self.core_builder_names_regexps = [
            "Leopard",
            "Windows.*Build",
            "Chromium",
        ]

    def _parse_builder_status_from_row(self, status_row):
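        """Build a status dictionary from one <tr> of the one_box_per_builder
        page.

        The result always has 'name', 'builder_url' and 'is_green' keys;
        'built_revision' and 'build_url' are added only when the row links
        to a build.
        """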
        # If WebKit's buildbot had an XMLRPC interface we could use, we could
        # do something more sophisticated here.  For now we just parse out the
        # basics, enough to support basic questions like "is the tree green?"
        status_cells = status_row.findAll('td')
        builder = {}

        name_link = status_cells[0].find('a')
        builder['name'] = name_link.string
        # We could generate the builder_url from the name in a future version
        # of this code.
        builder['builder_url'] = self.buildbot_server_url + name_link['href']

        status_link = status_cells[1].find('a')
        if not status_link:
            # We failed to find a link in the status cell, so just give up.
            # This can happen when a builder has just been added and the cell
            # reads "no build".
            # Other parts of the code depend on is_green being present.
            builder['is_green'] = False
            return builder
        # Will be either a revision number or a build number
        revision_string = status_link.string
        # If revision_string has non-digits, assume it's not a revision number.
        builder['built_revision'] = int(revision_string) \
                                    if not re.search(r'\D', revision_string) \
                                    else None
        builder['is_green'] = not re.search('fail',
                                            status_cells[1].renderContents())
        # We could parse out the build number instead, but for now just store
        # the URL.
        builder['build_url'] = self.buildbot_server_url + status_link['href']

        # We could parse out the current activity too.

        return builder

    def _builder_statuses_with_names_matching_regexps(self,
                                                      builder_statuses,
                                                      name_regexps):
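        """Return the statuses from builder_statuses whose names match any
        regexp in name_regexps (matching from the start of the name).
        """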
        builders = []
        for builder in builder_statuses:
            for name_regexp in name_regexps:
                if re.match(name_regexp, builder['name']):
                    builders.append(builder)
        return builders

    def red_core_builders(self):
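        """Return status dictionaries for core builders that are not green."""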
        red_builders = []
        for builder in self._builder_statuses_with_names_matching_regexps(
                               self.builder_statuses(),
                               self.core_builder_names_regexps):
            if not builder['is_green']:
                red_builders.append(builder)
        return red_builders

    def red_core_builders_names(self):
        red_builders = self.red_core_builders()
        return [builder['name'] for builder in red_builders]

    def core_builders_are_green(self):
        return not self.red_core_builders()

    def builder_statuses(self):
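        """Fetch and parse the one_box_per_builder page, returning one status
        dictionary per builder.
        """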
        build_status_url = self.buildbot_server_url + 'one_box_per_builder'
        page = urllib2.urlopen(build_status_url)
        soup = BeautifulSoup(page)

        builders = []
        status_table = soup.find('table')
        for status_row in status_table.findAll('tr'):
            builder = self._parse_builder_status_from_row(status_row)
            builders.append(builder)
        return builders
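

# A minimal usage sketch: check whether the core builders are green.  This
# block is illustrative only and assumes network access to build.webkit.org.
if __name__ == "__main__":
    buildbot = BuildBot()
    if buildbot.core_builders_are_green():
        print "All core builders are green."
    else:
        print "Red core builders: %s" % ", ".join(
            buildbot.red_core_builders_names())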