Example #1
def fetch_api(path, params=None):
    if GITHUB_TOKEN:
        headers = {"Authorization": "token {}".format(GITHUB_TOKEN)}
    else:
        headers = {}
    return fetch_json("https://api.github.com/{}".format(path), params,
                      headers)
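
A minimal usage sketch, assuming the project-internal fetch_json helper is in scope and that GITHUB_TOKEN is a module-level setting (read from the environment here purely for illustration); the path and params below are hypothetical:

import os

# Illustrative setup only: the real code defines GITHUB_TOKEN elsewhere.
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN", "")

# The path is appended to https://api.github.com/ and params become the query string.
pulls = fetch_api("repos/mozilla/treeherder/pulls", params={"state": "open"})
for pr in pulls:
    print(pr["number"], pr["title"])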
Example #2
def get_commits(repository, revision):
    # This gets the list of revisions for the push.  Treeherder only holds
    # the last 20 per push, so we may not have the oldest one.
    try:
        autorel_resp = fetch_json(
            'https://hg.mozilla.org/{}/json-automationrelevance/{}'.format(
                repository.name, revision))

        return list(autorel_resp["changesets"])
    except Exception:
        # fallback to using json-pushes

        try:
            json_pushes_resp = fetch_json(
                '{}/json-pushes?version=2&full=1&changeset={}'.format(
                    repository.url, revision))
            changesets = list(
                json_pushes_resp["pushes"].values())[0]['changesets']
            changesets.reverse()

            return changesets
        except Exception:
            # Re-raise if the json-pushes fallback also fails.
            raise
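
A hedged usage sketch: the repository argument only needs .name and .url attributes here, so a simple stand-in object is enough. The revision value is a placeholder, and the changeset fields printed ("node", "desc") follow the hg JSON shape seen in Example #4 below; they are assumptions, not part of the original.

from collections import namedtuple

# Hypothetical stand-in for the repository model; only .name and .url are used.
Repository = namedtuple("Repository", ["name", "url"])

repo = Repository(name="mozilla-central",
                  url="https://hg.mozilla.org/mozilla-central")

# Prefers json-automationrelevance, falls back to json-pushes; the revision
# below is a placeholder, not a real changeset.
changesets = get_commits(repo, "0123456789abcdef0123")
for cs in changesets:
    print(cs["node"], cs["desc"].splitlines()[0])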
Example #3
def download_artifact(root_url, task_id, path):
    """
    Downloads a Taskcluster artifact.
    Supports specific file formats like json and yaml.

    Returns either the parsed json, the parsed yaml or the plain response.
    """
    artifact_url = taskcluster_urls.api(
        root_url, 'queue', 'v1', 'task/{}/artifacts/{}'.format(task_id, path))

    if path.endswith(".json"):
        return fetch_json(artifact_url)
    if path.endswith(".yml"):
        return yaml.safe_load(fetch_text(artifact_url))

    return make_request(artifact_url)
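
A small sketch of how the return type follows the path suffix; the root URL, task id, and artifact paths below are placeholders, not values from the original:

ROOT_URL = "https://firefox-ci-tc.services.mozilla.com"  # illustrative root URL
TASK_ID = "AAAAAAAAAAAAAAAAAAAAAA"                        # placeholder task id

# .json artifacts come back as parsed JSON (via fetch_json) ...
manifest = download_artifact(ROOT_URL, TASK_ID, "public/build/manifest.json")

# ... .yml artifacts are parsed with yaml.safe_load ...
taskgraph = download_artifact(ROOT_URL, TASK_ID, "public/taskgraph.yml")

# ... and anything else is returned as the raw response from make_request.
log_response = download_artifact(ROOT_URL, TASK_ID, "public/logs/live_backing.log")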
Example #4
    def fetch_push(self, url, repository, sha=None):
        newrelic.agent.add_custom_parameter("sha", sha)

        logger.debug("fetching for %s %s", repository, url)
        # there will only ever be one, with this url
        push = list(fetch_json(url)["pushes"].values())[0]

        commits = []
        # we only want to ingest the last 200 commits for each push,
        # to protect against the 5000+ commit merges on release day uplift.
        for commit in push['changesets'][-200:]:
            commits.append({
                "revision": commit["node"],
                "author": commit["author"],
                "comment": commit["desc"],
            })

        return {
            "revision": commits[-1]["revision"],
            "author": push["user"],
            "push_timestamp": push["date"],
            "revisions": commits,
        }
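
For reference, a sketch of the dict this method returns; the values are illustrative only, and each comment points at the expression in the code above that produces the field:

example_push = {
    "revision": "deadbeef1234",          # commits[-1]["revision"]
    "author": "someone@example.com",     # push["user"]
    "push_timestamp": 1617184800,        # push["date"]
    "revisions": [                       # up to the last 200 changesets of the push
        {"revision": "...", "author": "...", "comment": "..."},
    ],
}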
Example #5
def get_task_definition(root_url, task_id):
    task_url = taskcluster_urls.api(root_url, 'queue', 'v1',
                                    'task/{}'.format(task_id))
    return fetch_json(task_url)
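
A usage sketch; the root URL and task id are placeholders, and the metadata lookup assumes the usual Taskcluster task-definition layout:

ROOT_URL = "https://firefox-ci-tc.services.mozilla.com"  # illustrative root URL

# Fetches the task definition from the queue service as a plain dict.
task = get_task_definition(ROOT_URL, "AAAAAAAAAAAAAAAAAAAAAA")
print(task.get("metadata", {}).get("name"))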
Example #6
    def get_bindings(self, queue_name):
        """Get list of bindings from the pulse API"""
        return fetch_json("{}queue/{}/bindings".format(PULSE_GUARDIAN_URL,
                                                       queue_name))
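
A hedged usage sketch: PULSE_GUARDIAN_URL is defined here only for illustration (the format string expects it to end with a slash), the queue name is a placeholder, and `client` stands in for an instance of whatever class this method belongs to:

# Hypothetical value; the real constant lives elsewhere in the project.
PULSE_GUARDIAN_URL = "https://pulseguardian.mozilla.org/"

# Returns the parsed JSON list of bindings for the named queue.
bindings = client.get_bindings("queue/my-user/my-queue")
print(bindings)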