Example #1
    def pushes(self, **kwargs):
        """
        Returns a sorted list of Push objects. The list cannot be empty.

        Basically issues a raw request to the server.
        """
        base_url = "%s/json-pushes?" % self.repo_url
        url = base_url + "&".join(
            sorted("%s=%s" % kv for kv in six.iteritems(kwargs)))
        LOG.debug("Using url: %s" % url)

        response = retry_get(url)
        data = response.json()

        if (response.status_code == 404 and data is not None
                and "error" in data and "unknown revision" in data["error"]):
            raise EmptyPushlogError(
                "The url %r returned a 404 error because the push is not"
                " in this repo (e.g., not merged yet)." % url)
        response.raise_for_status()

        if not data:
            raise EmptyPushlogError(
                "The url %r contains no pushlog. Maybe use another range ?" %
                url)

        pushlog = []
        for key in sorted(data):
            pushlog.append(Push(key, data[key]))
        return pushlog
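Every example on this page goes through a retry_get helper that the page does not reproduce. As a point of reference only, a minimal sketch of such a helper, assuming it simply wraps requests.get and retries a few times on connection errors, could look like this (retry count and delay are arbitrary):

    # Sketch of a retry_get helper (an assumption; the project's real helper is
    # not shown on this page): retry requests.get a few times on connection
    # errors before giving up.
    import time

    import requests

    def retry_get(url, retries=3, delay=5, **kwargs):
        for attempt in range(retries):
            try:
                return requests.get(url, **kwargs)
            except requests.exceptions.ConnectionError:
                if attempt == retries - 1:
                    raise
                time.sleep(delay)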
Example #2
    def pushes(self, **kwargs):
        """
        Returns a sorted list of Push objects. The list cannot be empty.

        Basically issues a raw request to the server.
        """
        base_url = '%s/json-pushes?' % self.repo_url
        url = base_url + '&'.join("%s=%s" % kv for kv in six.iteritems(kwargs))
        LOG.debug("Using url: %s" % url)

        response = retry_get(url)
        if response.status_code == 404:
            raise MozRegressionError(
                "The url %r returned a 404 error. Please check the"
                " validity of the url." % url)
        response.raise_for_status()
        data = response.json()
        if not data:
            raise EmptyPushlogError(
                "The url %r contains no pushlog. Maybe use another range ?" %
                url)
        pushlog = []
        for key in sorted(data):
            pushlog.append(Push(key, data[key]))
        return pushlog
Example #3
    def pushes(self, **kwargs):
        """
        Returns a sorted list of Push objects. The list cannot be empty.

        Basically issues a raw request to the server.
        """
        base_url = '%s/json-pushes?' % self.repo_url
        url = base_url + '&'.join("%s=%s" % kv for kv in kwargs.iteritems())
        LOG.debug("Using url: %s" % url)

        response = retry_get(url)
        if response.status_code == 404:
            raise MozRegressionError(
                "The url %r returned a 404 error. Please check the"
                " validity of the url." % url
            )
        response.raise_for_status()
        data = response.json()
        if not data:
            raise EmptyPushlogError(
                "The url %r contains no pushlog. Maybe use another range ?"
                % url
            )
        pushlog = []
        for key in sorted(data):
            pushlog.append(Push(key, data[key]))
        return pushlog
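To make the query building above concrete, here is a small stand-alone snippet showing the URL that pushes() would request; the repository URL and the fromchange/tochange values are hypothetical:

    # Hypothetical values, used only to show the resulting json-pushes URL.
    import six

    repo_url = "https://hg.mozilla.org/mozilla-central"
    kwargs = {"fromchange": "abc123", "tochange": "def456"}
    url = "%s/json-pushes?" % repo_url + "&".join(
        sorted("%s=%s" % kv for kv in six.iteritems(kwargs)))
    print(url)
    # https://hg.mozilla.org/mozilla-central/json-pushes?fromchange=abc123&tochange=def456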
Example #4
 def _request(self, url):
     response = retry_get(url)
     if response.status_code == 404:
         raise MozRegressionError(
             "The url %r returned a 404 error. Please check the"
             " validity of the url." % url)
     response.raise_for_status()
     pushlog = response.json()
     if not pushlog:
         raise EmptyPushlogError(
             "The url %r contains no pushlog. Maybe use another range ?" %
             url)
     return pushlog
Example #5
 def _request(self, url):
     response = retry_get(url)
     if response.status_code == 404:
         raise MozRegressionError(
             "The url %r returned a 404 error. Please check the"
             " validity of the url." % url
         )
     response.raise_for_status()
     pushlog = response.json()
     if not pushlog:
         raise EmptyPushlogError(
             "The url %r contains no pushlog. Maybe use another range ?"
             % url
         )
     return pushlog
Example #6
 def _request(self, url, check_changeset):
     response = retry_get(url)
     if response.status_code == 404:
         raise MozRegressionError(
             "The url %r returned a 404 error. Please check the"
             " validity of the given changeset %r." %
             (url, check_changeset)
         )
     response.raise_for_status()
     pushlog = response.json()
     if not pushlog:
         raise MozRegressionError(
             "The url %r contains no pushlog. Please check the"
             " validity of the given changeset %r." %
             (url, check_changeset)
         )
     return pushlog
Example #7
    def _fetch_txt_info(self, url):
        """
        Retrieve information from a build information txt file.

        Returns a dict with keys repository and changeset if information
        is found.
        """
        data = {}
        response = retry_get(url)
        for line in response.text.splitlines():
            if '/rev/' in line:
                repository, changeset = line.split('/rev/')
                data['repository'] = repository
                data['changeset'] = changeset
                break
        if not data:
            # the txt file could be in an old format:
            # DATE CHANGESET
            # we can try to extract that to get the changeset at least.
            matched = re.match(r'^\d+ (\w+)$', response.text.strip())
            if matched:
                data['changeset'] = matched.group(1)
        return data
Example #8
    def _fetch_txt_info(self, url):
        """
        Retrieve information from a build information txt file.

        Returns a dict with keys repository and changeset if information
        is found.
        """
        data = {}
        response = retry_get(url)
        for line in response.text.splitlines():
            if "/rev/" in line:
                repository, changeset = line.split("/rev/")
                data["repository"] = repository
                data["changeset"] = changeset
                break
        if not data:
            # the txt file could be in an old format:
            # DATE CHANGESET
            # we can try to extract that to get the changeset at least.
            matched = re.match(r"^\d+ (\w+)$", response.text.strip())
            if matched:
                data["changeset"] = matched.group(1)
        return data
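The two txt layouts handled above can be exercised with a stand-alone copy of the same parsing logic; the sample strings below are made up and only mimic the current "REPOSITORY/rev/CHANGESET" line and the legacy "DATE CHANGESET" line described in the comments:

    # Made-up sample data mimicking the two known build-info txt layouts.
    import re

    def parse_txt_info(text):
        data = {}
        for line in text.splitlines():
            if "/rev/" in line:
                repository, changeset = line.split("/rev/")
                data["repository"] = repository
                data["changeset"] = changeset
                break
        if not data:
            matched = re.match(r"^\d+ (\w+)$", text.strip())
            if matched:
                data["changeset"] = matched.group(1)
        return data

    print(parse_txt_info(
        "20170802100302\nhttps://hg.mozilla.org/mozilla-central/rev/abcdef123456"))
    # {'repository': 'https://hg.mozilla.org/mozilla-central', 'changeset': 'abcdef123456'}
    print(parse_txt_info("20110412030212 abcdef123456"))
    # {'changeset': 'abcdef123456'}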
Example #9
def releases():
    """
    Provide the list of releases with their associated dates.

    The date is a string formatted as "yyyy-mm-dd", and the release is an integer.
    """
    # The dates come from https://wiki.mozilla.org/RapidRelease/Calendar,
    # using the ones in the "beta" column (formerly "aurora"). This is because
    # the merge date for beta corresponds to the last nightly for that
    # release. See bug 996812.
    releases = {
        5: "2011-04-12",
        6: "2011-05-24",
        7: "2011-07-05",
        8: "2011-08-16",
        9: "2011-09-27",
        10: "2011-11-08",
        11: "2011-12-20",
        12: "2012-01-31",
        13: "2012-03-13",
        14: "2012-04-24",
        15: "2012-06-05",
        16: "2012-07-16",
        17: "2012-08-27",
        18: "2012-10-08",
        19: "2012-11-19",
        20: "2013-01-07",
        21: "2013-02-19",
        22: "2013-04-01",
        23: "2013-05-13",
        24: "2013-06-24",
        25: "2013-08-05",
        26: "2013-09-16",
        27: "2013-10-28",
        28: "2013-12-09",
        29: "2014-02-03",
        30: "2014-03-17",
        31: "2014-04-28",
        32: "2014-06-09",
        33: "2014-07-21",
        34: "2014-09-02",
        35: "2014-10-13",
        36: "2014-11-28",
        37: "2015-01-12",
        38: "2015-02-23",
        39: "2015-03-30",
        40: "2015-05-11",
        41: "2015-06-29",
        42: "2015-08-10",
        43: "2015-09-21",
        44: "2015-10-29",
        45: "2015-12-14",
        46: "2016-01-25",
        47: "2016-03-07",
        48: "2016-04-25",
        49: "2016-06-06",
        50: "2016-08-01",
        51: "2016-09-19",
        52: "2016-11-14",
        53: "2017-01-23",
        54: "2017-03-06",
        55: "2017-06-12",
        56: "2017-08-02",
    }

    def filter_tags(tag_node):
        match = re.match(r"^FIREFOX_NIGHTLY_(\d+)_END$", tag_node["tag"])
        return int(match.group(1)) > 56 if match else False

    def map_tags(tag_node):
        release = {}
        merge_date = date.fromtimestamp(tag_node["date"][0] +
                                        tag_node["date"][1])
        ver_match = re.search(r"_(\d+)_", tag_node["tag"])
        release[int(ver_match.group(1))] = merge_date.isoformat()
        return release

    tags_url = "https://hg.mozilla.org/mozilla-central/json-tags"
    response = retry_get(tags_url)

    if response.status_code == 200:
        fetched_releases = list(
            map(map_tags, list(filter(filter_tags,
                                      response.json()["tags"]))))

        for release in fetched_releases:
            releases.update(release)

    return releases
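To see what filter_tags and map_tags do to a single entry, here is an illustration with a fabricated tag node; the tag name and timestamp are invented, but the shape matches what the code above reads (a "tag" string and a two-number "date" that it sums):

    # Fabricated json-tags entry, shaped only as the code above expects it.
    import re
    from datetime import date

    tag_node = {"tag": "FIREFOX_NIGHTLY_57_END", "date": [1510000000, 0]}

    match = re.match(r"^FIREFOX_NIGHTLY_(\d+)_END$", tag_node["tag"])
    print(bool(match) and int(match.group(1)) > 56)  # True, so filter_tags keeps it
    merge_date = date.fromtimestamp(tag_node["date"][0] + tag_node["date"][1])
    print({int(match.group(1)): merge_date.isoformat()})  # e.g. {57: '2017-11-06'}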
Example #10
 def run(self):
     data = retry_get(self.GITHUB_LATEST_RELEASE_URL).json()
     self.tag_name = data['tag_name']
     self.release_url = data['html_url']
Example #11
 def run(self):
     data = retry_get(self.GITHUB_LATEST_RELEASE_URL,
                      verify=cacert_path()).json()
     self.tag_name = data['tag_name']
     self.release_url = data['html_url']
Example #12
def releases():
    """
    Provide the list of releases with their associated dates.

    The date is a string formatted as "yyyy-mm-dd", and the release is an integer.
    """
    # The dates come from https://wiki.mozilla.org/RapidRelease/Calendar,
    # using the ones in the "beta" column (formerly "aurora"). This is because
    # the merge date for beta corresponds to the last nightly for that
    # release. See bug 996812.
    releases = {
        5: "2011-04-12",
        6: "2011-05-24",
        7: "2011-07-05",
        8: "2011-08-16",
        9: "2011-09-27",
        10: "2011-11-08",
        11: "2011-12-20",
        12: "2012-01-31",
        13: "2012-03-13",
        14: "2012-04-24",
        15: "2012-06-05",
        16: "2012-07-16",
        17: "2012-08-27",
        18: "2012-10-08",
        19: "2012-11-19",
        20: "2013-01-07",
        21: "2013-02-19",
        22: "2013-04-01",
        23: "2013-05-13",
        24: "2013-06-24",
        25: "2013-08-05",
        26: "2013-09-16",
        27: "2013-10-28",
        28: "2013-12-09",
        29: "2014-02-03",
        30: "2014-03-17",
        31: "2014-04-28",
        32: "2014-06-09",
        33: "2014-07-21",
        34: "2014-09-02",
        35: "2014-10-13",
        36: "2014-11-28",
        37: "2015-01-12",
        38: "2015-02-23",
        39: "2015-03-30",
        40: "2015-05-11",
        41: "2015-06-29",
        42: "2015-08-10",
        43: "2015-09-21",
        44: "2015-10-29",
        45: "2015-12-14",
        46: "2016-01-25",
        47: "2016-03-07",
        48: "2016-04-25",
        49: "2016-06-06",
        50: "2016-08-01",
        51: "2016-09-19",
        52: "2016-11-14",
        53: "2017-01-23",
        54: "2017-03-06",
        55: "2017-06-12",
        56: "2017-08-02"
    }

    def filter_tags(tag_node):
        match = re.match(r"^FIREFOX_NIGHTLY_(\d+)_END$", tag_node["tag"])
        return int(match.group(1)) > 56 if match else False

    def map_tags(tag_node):
        release = {}
        merge_date = date.fromtimestamp(tag_node["date"][0] + tag_node["date"][1])
        ver_match = re.search(r"_(\d+)_", tag_node["tag"])
        release[int(ver_match.group(1))] = merge_date.isoformat()
        return release

    tags_url = "https://hg.mozilla.org/mozilla-central/json-tags"
    response = retry_get(tags_url)

    if response.status_code == 200:
        fetched_releases = map(
            map_tags,
            filter(filter_tags, response.json()["tags"])
        )

        for release in fetched_releases:
            releases.update(release)

    return releases
Example #13
 def run(self):
     data = retry_get(self.GITHUB_LATEST_RELEASE_URL).json()
     self.tag_name = data.get("tag_name")
     self.release_url = data.get("html_url")
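The run() variants above poll GitHub's latest-release endpoint and read its tag_name and html_url fields. A stand-alone equivalent, assuming the mozilla/mozregression repository and plain requests in place of retry_get, might look like this:

    # Sketch only: the URL constant is an assumption, and requests.get stands in
    # for the project's retry_get wrapper.
    import requests

    GITHUB_LATEST_RELEASE_URL = (
        "https://api.github.com/repos/mozilla/mozregression/releases/latest")

    def latest_release():
        data = requests.get(GITHUB_LATEST_RELEASE_URL, timeout=30).json()
        return data.get("tag_name"), data.get("html_url")

    tag_name, release_url = latest_release()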