def _get_browser(self):
    """Lazily create and return the mechanize Browser shared by this object.

    The Browser is built on first use and cached on self._browser; robots.txt
    handling is disabled because bugs.webkit.org/robots.txt currently blocks
    this script.
    """
    if not self._browser:
        from webkitpy.thirdparty.autoinstalled.mechanize import Browser
        browser = Browser()
        # Ignore bugs.webkit.org/robots.txt until we fix it to allow this script.
        browser.set_handle_robots(False)
        self._browser = browser
    return self._browser
def _get_browser(self):
    """Lazily create and return the mechanize Browser shared by this object.

    On first use this also installs a 600-second global socket timeout so
    network requests cannot hang forever, and disables robots.txt handling
    (bugs.webkit.org/robots.txt currently blocks this script).
    """
    if not self._browser:
        import socket
        # BUG FIX: the original called self.setdefaulttimeout(600), but no such
        # method exists on this class (or anywhere visible in this file);
        # the module-level socket.setdefaulttimeout was clearly intended.
        socket.setdefaulttimeout(600)
        from webkitpy.thirdparty.autoinstalled.mechanize import Browser
        self._browser = Browser()
        self._browser.set_handle_robots(False)
    return self._browser
def __init__(self, name, buildbot):
    """Bind this builder to *buildbot* under the given display *name*.

    Per-build lookups are cached in self._builds_cache; the mechanize
    Browser is created eagerly with robots.txt handling disabled because
    the builder pages are excluded by robots.txt.
    """
    self._name = name
    self._buildbot = buildbot
    self._revision_to_build_number = None
    self._builds_cache = {}
    browser = Browser()
    # The builder pages are excluded by robots.txt.
    browser.set_handle_robots(False)
    self._browser = browser
def __init__(self, host=statusserver_default_host, use_https=True, browser=None, bot_id=None):
    """Configure the status-server client.

    A caller-supplied *browser* is used when given; otherwise a mechanize
    Browser is created.  Either way robots.txt handling is disabled on it.
    """
    self.set_host(host)
    self.set_use_https(use_https)
    self._api_key = ''
    from webkitpy.thirdparty.autoinstalled.mechanize import Browser
    chosen_browser = browser or Browser()
    chosen_browser.set_handle_robots(False)
    self._browser = chosen_browser
    self.set_bot_id(bot_id)
def __init__(self, name, buildbot):
    """Bind this builder to *buildbot* under the given display *name*.

    Builds are cached per build number in self._builds_cache.  The mechanize
    Browser ignores robots.txt because the builder pages are excluded by it.
    """
    self._name = name
    self._buildbot = buildbot
    self._revision_to_build_number = None
    self._builds_cache = {}
    from webkitpy.thirdparty.autoinstalled.mechanize import Browser
    browser = Browser()
    # The builder pages are excluded by robots.txt.
    browser.set_handle_robots(False)
    self._browser = browser
def __init__(self, host=ewsserver_default_host, use_https=True, browser=None):
    """Configure the EWS-server client.

    Uses the caller-supplied *browser* when given, otherwise creates a
    mechanize Browser; robots.txt handling is disabled either way.
    """
    self.host = host
    self.use_https = bool(use_https)
    from webkitpy.thirdparty.autoinstalled.mechanize import Browser
    chosen_browser = browser or Browser()
    chosen_browser.set_handle_robots(False)
    self._browser = chosen_browser
def __init__(self, dryrun=False, committers=committers.CommitterList()):
    """Initialize the Bugzilla client.

    NOTE(review): the CommitterList default is evaluated once at import time
    and shared by every instance constructed without an explicit argument --
    confirm that sharing is intended.  (It cannot be replaced with the usual
    None-sentinel here because the parameter name shadows the `committers`
    module inside the body.)
    """
    self.dryrun = dryrun
    self.authenticated = False
    self.cached_quips = []
    self.queries = BugzillaQueries(self)
    self.committers = committers
    # FIXME: We should use some sort of Browser mock object when in dryrun
    # mode (to prevent any mistakes).
    browser = Browser()
    # Ignore bugs.webkit.org/robots.txt until we fix it to allow this
    # script.
    browser.set_handle_robots(False)
    self.browser = browser
def __init__(self, dryrun=False, committers=committers.CommitterList()):
    """Initialize the Bugzilla client.

    NOTE(review): the CommitterList default is evaluated once at import time
    and shared by every instance constructed without an explicit argument --
    confirm that sharing is intended.  (It cannot be replaced with the usual
    None-sentinel here because the parameter name shadows the `committers`
    module inside the body.)
    """
    self.dryrun = dryrun
    self.authenticated = False
    self.cached_quips = []
    self.queries = BugzillaQueries(self)
    self.committers = committers
    self.edit_user_parser = EditUsersParser()
    # FIXME: We should use some sort of Browser mock object when in dryrun
    # mode (to prevent any mistakes).
    from webkitpy.thirdparty.autoinstalled.mechanize import Browser
    browser = Browser()
    # Ignore bugs.webkit.org/robots.txt until we fix it to allow this script.
    browser.set_handle_robots(False)
    self.browser = browser
def force_build(self, username="******", comments=None):
    """Submit this builder's force-build web form and return the response.

    Lazily creates the mechanize Browser (robots.txt disabled -- the builder
    pages are excluded by it), opens the builder page, selects the form that
    has a "username" control, fills it in, and submits.
    """
    def _has_username_control(form):
        # select_form needs a predicate; forms without the control raise,
        # which we treat as "not the form we want".
        try:
            return form.find_control("username")
        except Exception:
            return False

    if not self._browser:
        browser = Browser()
        # The builder pages are excluded by robots.txt.
        browser.set_handle_robots(False)
        self._browser = browser
    # ignore false positives for missing Browser methods - pylint: disable=E1102
    self._browser.open(self.url())
    self._browser.select_form(predicate=_has_username_control)
    self._browser["username"] = username
    if comments:
        self._browser["comments"] = comments
    return self._browser.submit()
def __init__(self, host=statusserver_default_host, browser=None, bot_id=None):
    """Configure the status-server client.

    Uses the caller-supplied *browser* when given, otherwise creates a
    mechanize Browser.
    """
    self.set_host(host)
    if browser:
        self._browser = browser
    else:
        from webkitpy.thirdparty.autoinstalled.mechanize import Browser
        self._browser = Browser()
    self.set_bot_id(bot_id)
def __init__(self, host):
    """Remember *host* and create the mechanize Browser used for requests."""
    self._browser = Browser()
    self._host = host
class Builder(object):
    """A single builder on a buildbot master.

    Fetches build dictionaries and layout-test results over HTTP and caches
    Build objects per build number.  The mechanize Browser used by
    force_build() is created lazily.
    """

    def __init__(self, name, buildbot):
        self._name = name
        self._buildbot = buildbot
        self._builds_cache = {}
        self._revision_to_build_number = None
        self._browser = None

    def name(self):
        """Return this builder's display name."""
        return self._name

    def results_url(self):
        """Return the URL of this builder's results directory."""
        return "%s/results/%s" % (self._buildbot.buildbot_url, self.url_encoded_name())

    # In addition to per-build results, the build.chromium.org builders also
    # keep a directory that accumulates test results over many runs.
    def accumulated_results_url(self):
        return None

    def latest_layout_test_results_url(self):
        # Prefer the accumulated directory when the subclass provides one.
        return self.accumulated_results_url() or self.latest_cached_build().results_url()

    @memoized
    def latest_layout_test_results(self):
        return self.fetch_layout_test_results(self.latest_layout_test_results_url())

    def _fetch_file_from_results(self, results_url, file_name):
        """Fetch file_name from under results_url; None if unavailable."""
        # It seems this can return None if the url redirects and then returns 404.
        result = urllib2.urlopen("%s/%s" % (results_url, file_name))
        if not result:
            return None
        # urlopen returns a file-like object which sometimes works fine with str()
        # but sometimes is a addinfourl object. In either case calling read() is correct.
        return result.read()

    def fetch_layout_test_results(self, results_url):
        """Fetch and parse full_results.json from results_url."""
        # FIXME: This should cache that the result was a 404 and stop hitting the network.
        results_file = NetworkTransaction(convert_404_to_None=True).run(
            lambda: self._fetch_file_from_results(results_url, "full_results.json"))
        return LayoutTestResults.results_from_string(results_file)

    def url_encoded_name(self):
        return urllib.quote(self._name)

    def url(self):
        """Return the URL of this builder's page on the buildbot master."""
        return "%s/builders/%s" % (self._buildbot.buildbot_url, self.url_encoded_name())

    # This provides a single place to mock
    def _fetch_build(self, build_number):
        build_dictionary = self._buildbot._fetch_build_dictionary(self, build_number)
        if not build_dictionary:
            return None
        revision_string = build_dictionary['sourceStamp']['revision']
        return Build(
            self,
            build_number=int(build_dictionary['number']),
            # 'revision' may be None if a trunk build was started by the force-build button on the web page.
            revision=(int(revision_string) if revision_string else None),
            # Buildbot uses any number other than 0 to mean fail.  Since we fetch with
            # filter=1, passing builds may contain no 'results' value.
            is_green=(not build_dictionary.get('results')),
        )

    def build(self, build_number):
        """Return the (cached) Build for build_number, or None."""
        if not build_number:
            return None
        cached_build = self._builds_cache.get(build_number)
        if cached_build:
            return cached_build
        build = self._fetch_build(build_number)
        self._builds_cache[build_number] = build
        return build

    def latest_cached_build(self):
        """Return the Build with the highest build number that has results."""
        revision_build_pairs = self.revision_build_pairs_with_results()
        revision_build_pairs.sort(key=lambda i: i[1])
        latest_build_number = revision_build_pairs[-1][1]
        return self.build(latest_build_number)

    def force_build(self, username="******", comments=None):
        """Submit this builder's force-build web form and return the response."""
        def predicate(form):
            # select_form needs a predicate; forms without the "username"
            # control raise, which means "not the form we want".
            try:
                return form.find_control("username")
            # BUG FIX: the original used the Python-2-only syntax
            # "except Exception, e:", inconsistent with the rest of the file;
            # the bound exception was also unused.
            except Exception:
                return False
        if not self._browser:
            self._browser = Browser()
            # The builder pages are excluded by robots.txt.
            self._browser.set_handle_robots(False)
        # ignore false positives for missing Browser methods - pylint: disable=E1102
        self._browser.open(self.url())
        self._browser.select_form(predicate=predicate)
        self._browser["username"] = username
        if comments:
            self._browser["comments"] = comments
        return self._browser.submit()
def __init__(self, url=default_url, browser=None):
    """Configure the client against *url*.

    Uses the caller-supplied *browser* when given, otherwise creates a
    mechanize Browser.
    """
    self._chrome_channels = set(self.chrome_channels)
    self.set_url(url)
    if browser:
        self._browser = browser
    else:
        from webkitpy.thirdparty.autoinstalled.mechanize import Browser
        self._browser = Browser()
def __init__(self, host=default_host, browser=None, bot_id=None):
    """Configure the client, creating a Browser unless one is supplied."""
    self.set_host(host)
    self._browser = browser if browser else Browser()
    self.set_bot_id(bot_id)
def __init__(self, host=default_host):
    """Record *host* and create the mechanize Browser used for requests."""
    self.browser = Browser()
    self.set_host(host)