Exemple #1
0
def get_task_url(task_id, use_proxy=False):
    """Return the queue service URL for the task *task_id*.

    When *use_proxy* is set, the URL is built against the in-task
    Taskcluster proxy rather than the public root URL.
    """
    if use_proxy:
        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
        template = os.environ['TASKCLUSTER_PROXY_URL'] + '/queue/v1/task/{}'
    else:
        template = liburls.api(get_root_url(), 'queue', 'v1', 'task/{}')
    return template.format(task_id)
Exemple #2
0
def get_index_url(index_path, use_proxy=False, multiple=False):
    """Return the index service URL for *index_path*.

    *multiple* selects the plural "tasks" endpoint; *use_proxy* builds
    the URL against the in-task Taskcluster proxy.
    """
    plural = 's' if multiple else ''
    if use_proxy:
        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
        template = os.environ['TASKCLUSTER_PROXY_URL'] + '/index/v1/task{}/{}'
    else:
        template = liburls.api(get_root_url(), 'index', 'v1', 'task{}/{}')
    return template.format(plural, index_path)
Exemple #3
0
def find_task_id(index_path, root_url):
    """Look up *index_path* in the index service and return its taskId.

    Raises Exception when the index route does not exist (HTTP 404).
    """
    url = liburls.api(root_url, 'index', 'v1',
                      'task/{}'.format(index_path))
    resp = requests.get(url)
    if resp.status_code == 404:
        raise Exception("Index URL {} not found".format(url))
    return resp.json()['taskId']
Exemple #4
0
async def addArtifactUploadedLinks(root_url, taskId, runId, job):
    """Attach "artifact uploaded" links for the run's artifacts to *job*.

    Best-effort: when the artifact list cannot be fetched, *job* is
    returned unchanged.
    """
    try:
        artifacts = await fetchArtifacts(root_url, taskId, runId)
    except Exception:
        logger.warning("Artifacts could not be found for task: %s run: %s", taskId, runId)
        return job

    seen = {}
    links = []
    for artifact in artifacts:
        name = os.path.basename(artifact["name"])
        # Disambiguate repeated basenames with a counter suffix.
        occurrences = seen.setdefault(name, [])
        occurrences.append(artifact["name"])
        if len(occurrences) > 1:
            name = "{name} ({length})".format(name=name, length=len(occurrences) - 1)

        url = taskcluster_urls.api(
            root_url,
            "queue",
            "v1",
            "task/{taskId}/runs/{runId}/artifacts/{artifact_name}".format(
                taskId=taskId, runId=runId, artifact_name=artifact["name"]
            ))
        links.append({
            "label": "artifact uploaded",
            "linkText": name,
            "url": url,
        })

    job["jobInfo"]["links"] = links
    return job
Exemple #5
0
def get_indexed_tasks_url(namespace, root_url=PRODUCTION_TASKCLUSTER_ROOT_URL):
    """Return the index URL listing all tasks under *namespace*."""
    route = f"tasks/{namespace}"
    return liburls.api(root_url, "index", "v1", route)
Exemple #6
0
class TestBuildUrl(ClientTest):
    """Checks for Client.buildUrl route construction."""

    apiPath = liburls.api(ClientTest.test_root_url, 'fake', 'v1',
                          'two_args_no_input/arg0/arg1')

    def test_build_url_positional(self):
        """Positional route args produce the expected URL."""
        url = self.client.buildUrl('two_args_no_input', 'arg0', 'arg1')
        self.assertEqual(self.apiPath, url)

    def test_build_url_keyword(self):
        """Keyword route args produce the same URL as positional ones."""
        url = self.client.buildUrl('two_args_no_input',
                                   arg0='arg0',
                                   arg1='arg1')
        self.assertEqual(self.apiPath, url)

    def test_build_url_query_string(self):
        """A query dict is appended as an encoded query string."""
        route_params = {
            'arg0': 'arg0',
            'arg1': 'arg1'
        }
        url = self.client.buildUrl('two_args_no_input',
                                   params=route_params,
                                   query={'qs0': 1})
        self.assertEqual(self.apiPath + '?qs0=1', url)

    def test_fails_to_build_url_for_missing_method(self):
        """Unknown method names raise TaskclusterFailure."""
        with self.assertRaises(exc.TaskclusterFailure):
            self.client.buildUrl('non-existing')

    def test_fails_to_build_not_enough_args(self):
        """Too few route arguments raise TaskclusterFailure."""
        with self.assertRaises(exc.TaskclusterFailure):
            self.client.buildUrl('two_args_no_input', 'not-enough-args')
Exemple #7
0
def get_artifact_url(task_id, path, root_url=PRODUCTION_TASKCLUSTER_ROOT_URL):
    """Return the queue URL of artifact *path* belonging to *task_id*."""
    route = f"task/{task_id}/artifacts/{path}"
    return liburls.api(root_url, "queue", "v1", route)
Exemple #8
0
def get_index_url(index_path, root_url=PRODUCTION_TASKCLUSTER_ROOT_URL):
    """Return the index service URL for *index_path*."""
    route = f"task/{index_path}"
    return liburls.api(root_url, "index", "v1", route)
 def _constructUrl(self, route):
     """Build the full URL for *route* on this service, rooted at the
     configured rootUrl."""
     # Strip trailing slashes so the joined URL has no double slash.
     return liburls.api(self.options['rootUrl'],
                        self.serviceName,
                        self.apiVersion,
                        route.rstrip('/'))
Exemple #10
0
def get_purge_cache_url(provisioner_id, worker_type, use_proxy=False):
    """Return the purge-cache endpoint URL for the given worker pool.

    When *use_proxy* is set, the URL is built against the in-task
    Taskcluster proxy rather than the public root URL.
    """
    if use_proxy:
        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
        template = (os.environ['TASKCLUSTER_PROXY_URL']
                    + '/purge-cache/v1/purge-cache/{}/{}')
    else:
        template = liburls.api(get_root_url(), 'purge-cache', 'v1',
                               'purge-cache/{}/{}')
    return template.format(provisioner_id, worker_type)
Exemple #11
0
def createLogReference(root_url, taskId, runId):
    """Build the log-reference dict pointing at the run's live_backing.log."""
    url = taskcluster_urls.api(
        root_url, "queue", "v1",
        "task/{taskId}/runs/{runId}/artifacts/public/logs/live_backing.log"
    ).format(taskId=taskId, runId=runId)
    return {"name": "live_backing_log", "url": url}
Exemple #12
0
def get_artifact_url(task_id, path, old_deployment=False):
    """Return the public URL of artifact *path* for *task_id*.

    With *old_deployment* set, targets the legacy queue.taskcluster.net
    deployment instead of the current production root URL.
    """
    if old_deployment:
        return f"https://queue.taskcluster.net/v1/task/{task_id}/artifacts/{path}"
    return liburls.api(
        PRODUCTION_TASKCLUSTER_ROOT_URL,
        "queue",
        "v1",
        f"task/{task_id}/artifacts/{path}",
    )
Exemple #13
0
def createLogReference(taskId, runId):
    """Build the log-reference dict for a run's live_backing.log artifact.

    NOTE(review): ``root_url`` is not a parameter here — it is a free
    variable, so a module-level ``root_url`` must exist at call time or
    this raises NameError. Compare the sibling variant that takes
    ``root_url`` explicitly; confirm against the enclosing module.
    """
    logUrl = taskcluster_urls.api(
        root_url, "queue", "v1",
        "task/{taskId}/runs/{runId}/artifacts/public/logs/live_backing.log"
    ).format(taskId=taskId, runId=runId)
    return {
        # XXX: This is a magical name see 1147958 which enables the log viewer.
        "name": "builds-4h",
        "url": logUrl,
    }
Exemple #14
0
def send_email(address, subject, content, link, use_proxy=False):
    """Sends an email using the notify service"""
    logger.info('Sending email to {}.'.format(address))
    payload = {
        'address': address,
        'subject': subject,
        'content': content,
        'link': link,
    }
    url = liburls.api(get_root_url(use_proxy), 'notify', 'v1', 'email')
    _do_request(url, json=payload)
 def buildUrl(self, methodName, *args, **kwargs):
     """Build the (unsigned) URL for API method *methodName*, substituting
     route arguments and appending any query parameters."""
     entry = self.funcinfo.get(methodName)
     if not entry:
         raise exceptions.TaskclusterFailure(
             'Requested method "%s" not found in API Reference' % methodName)
     routeParams, _, query, _, _ = self._processArgs(entry, *args, **kwargs)
     route = self._subArgsInRoute(entry, routeParams)
     if query:
         route = '{}?{}'.format(route, urllib.parse.urlencode(query))
     return liburls.api(self.options['rootUrl'], self.serviceName,
                        self.apiVersion, route)
Exemple #16
0
 def buildUrl(self, methodName, *args, **kwargs):
     """Return the unsigned URL for API method *methodName*.

     Route parameters come from *args*/*kwargs*; an optional ``query``
     mapping is URL-encoded and appended as a query string. Raises
     TaskclusterFailure when *methodName* is not in the API reference.
     """
     entry = self.funcinfo.get(methodName)
     if not entry:
         raise exceptions.TaskclusterFailure(
             'Requested method "%s" not found in API Reference' %
             methodName)
     # Split the caller's arguments into route substitutions and query params.
     routeParams, _, query, _, _ = self._processArgs(entry, *args, **kwargs)
     route = self._subArgsInRoute(entry, routeParams)
     if query:
         route += '?' + urllib.parse.urlencode(query)
     return liburls.api(self.options['rootUrl'], self.serviceName,
                        self.apiVersion, route)
Exemple #17
0
 def _root_url(self, artifactdir=None, objdir=None):
     """Generate a publicly-accessible URL for the tasks's artifacts, or an objdir path"""
     # Inside a Taskcluster task both TASK_ID and RUN_ID are set; build an
     # artifact URL for this run. Otherwise fall back to a local objdir path.
     if 'TASK_ID' in os.environ and 'RUN_ID' in os.environ:
         import taskcluster_urls
         from taskgraph.util.taskcluster import get_root_url
         route = 'task/{}/runs/{}/artifacts/{}'.format(
             os.environ['TASK_ID'], os.environ['RUN_ID'], artifactdir)
         return taskcluster_urls.api(get_root_url(False), 'queue', 'v1', route)
     return os.path.join(self.topobjdir, objdir)
Exemple #18
0
def send_email(address, subject, content, link, use_proxy=False):
    """Sends an email using the notify service"""
    logger.info(f"Sending email to {address}.")
    payload = {
        "address": address,
        "subject": subject,
        "content": content,
        "link": link,
    }
    url = liburls.api(get_root_url(use_proxy), "notify", "v1", "email")
    _do_request(url, json=payload)
Exemple #19
0
def list_task_group_tasks(task_group_id):
    """Generate the tasks in a task group"""
    # The URL is loop-invariant; only the continuation token changes.
    url = liburls.api(get_root_url(False), 'queue', 'v1',
                      'task-group/{}/list'.format(task_group_id))
    params = {}
    while True:
        resp = _do_request(url, method="get", params=params).json()
        yield from resp['tasks']
        token = resp.get('continuationToken')
        if not token:
            break
        params = {'continuationToken': token}
Exemple #20
0
def list_task_group_incomplete_tasks(task_group_id):
    """Generate the incomplete tasks in a task group"""
    params = {}
    while True:
        url = liburls.api(get_root_url(), 'queue', 'v1',
                          'task-group/{}/list'.format(task_group_id))
        resp = _do_request(url, force_get=True, params=params).json()
        for entry in resp['tasks']:
            status = entry['status']
            # Anything not yet completed/failed counts as incomplete.
            if status['state'] in ('running', 'pending', 'unscheduled'):
                yield status['taskId']
        token = resp.get('continuationToken')
        if token:
            params = {'continuationToken': token}
        else:
            break
Exemple #21
0
def get_artifact_url(task_id, path, use_proxy=False):
    """Return a URL for artifact *path* of *task_id*.

    With *use_proxy*, a signed (bewit) URL is fetched via the in-task
    Taskcluster proxy instead of returning the plain queue URL.
    """
    artifact_tmpl = liburls.api(get_root_url(), 'queue', 'v1',
                                'task/{}/artifacts/{}')
    data = artifact_tmpl.format(task_id, path)
    if not use_proxy:
        return data
    # Until Bug 1405889 is deployed, we can't download directly
    # from the taskcluster-proxy.  Work around by using the /bewit
    # endpoint instead.
    # The bewit URL is the body of a 303 redirect, which we don't
    # want to follow (which fetches a potentially large resource).
    response = _do_request(os.environ['TASKCLUSTER_PROXY_URL'] + '/bewit',
                           data=data,
                           allow_redirects=False)
    return response.text
Exemple #22
0
def send_email(address, subject, content, link, use_proxy=False):
    """Sends an email using the notify service"""
    logger.info('Sending email to {}.'.format(address))
    if use_proxy:
        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
        url = os.environ['TASKCLUSTER_PROXY_URL'] + '/notify/v1/email'
    else:
        url = liburls.api(get_root_url(), 'notify', 'v1', 'email')
    payload = {
        'address': address,
        'subject': subject,
        'content': content,
        'link': link,
    }
    _do_request(url, json=payload)
Exemple #23
0
def list_task_group_tasks(task_group_id):
    """Generate the tasks in a task group"""
    # The URL is loop-invariant; only the continuation token changes.
    url = liburls.api(
        get_root_url(False),
        "queue",
        "v1",
        f"task-group/{task_group_id}/list",
    )
    params = {}
    while True:
        resp = _do_request(url, method="get", params=params).json()
        for task in resp["tasks"]:
            yield task
        token = resp.get("continuationToken")
        if not token:
            break
        params = {"continuationToken": token}
Exemple #24
0
def download_artifact(root_url, task_id, path):
    """
    Downloads a Taskcluster artifact.
    Supports specific file formats like json and yaml.

    Returns either the parsed json, the parsed yaml or the plain response.
    """
    route = 'task/{}/artifacts/{}'.format(task_id, path)
    artifact_url = taskcluster_urls.api(root_url, 'queue', 'v1', route)

    # Dispatch on the artifact's extension.
    if path.endswith(".json"):
        return fetch_json(artifact_url)
    elif path.endswith(".yml"):
        return yaml.safe_load(fetch_text(artifact_url))
    else:
        return make_request(artifact_url)
Exemple #25
0
class TestBuildSignedUrl(ClientTest):
    """Checks for Client.buildSignedUrl bewit-signed URLs."""

    apiPath = liburls.api(ClientTest.test_root_url, 'fake', 'v1',
                          'two_args_no_input/arg0/arg1')

    def test_builds_surl_positional(self):
        """Positional route args yield the expected signed URL."""
        signed = self.client.buildSignedUrl('two_args_no_input', 'arg0',
                                            'arg1')
        # The bewit token varies per call; normalise it before comparing.
        signed = re.sub('bewit=[^&]*', 'bewit=X', signed)
        self.assertEqual(self.apiPath + '?bewit=X', signed)

    def test_builds_surl_keyword(self):
        """Keyword route args yield the same signed URL as positional."""
        signed = self.client.buildSignedUrl('two_args_no_input',
                                            arg0='arg0',
                                            arg1='arg1')
        signed = re.sub('bewit=[^&]*', 'bewit=X', signed)
        self.assertEqual(self.apiPath + '?bewit=X', signed)
Exemple #26
0
def get_artifact_url(task_id, path, use_proxy=False):
    """Return a URL for artifact *path* of *task_id*.

    With *use_proxy*, a signed (bewit) URL is fetched via the in-task
    Taskcluster proxy instead of returning the plain queue URL.
    """
    template = liburls.api(
        get_root_url(False), "queue", "v1", "task/{}/artifacts/{}"
    )
    data = six.ensure_text(template.format(task_id, path))
    if not use_proxy:
        return data
    # Until Bug 1405889 is deployed, we can't download directly
    # from the taskcluster-proxy.  Work around by using the /bewit
    # endpoint instead.
    # The bewit URL is the body of a 303 redirect, which we don't
    # want to follow (which fetches a potentially large resource).
    response = _do_request(
        os.environ["TASKCLUSTER_PROXY_URL"] + "/bewit",
        data=data,
        allow_redirects=False,
    )
    return six.ensure_text(response.text)
Exemple #27
0
def list_task_group_incomplete_tasks(task_group_id):
    """Generate the incomplete tasks in a task group"""
    # The URL is loop-invariant; only the continuation token changes.
    url = liburls.api(
        get_root_url(False),
        "queue",
        "v1",
        f"task-group/{task_group_id}/list",
    )
    params = {}
    while True:
        resp = _do_request(url, force_get=True, params=params).json()
        for entry in resp["tasks"]:
            status = entry["status"]
            # Anything not yet completed/failed counts as incomplete.
            if status["state"] in ("running", "pending", "unscheduled"):
                yield status["taskId"]
        token = resp.get("continuationToken")
        if not token:
            break
        params = {"continuationToken": token}
Exemple #28
0
async def addArtifactUploadedLinks(root_url, taskId, runId, job, session):
    """Attach "artifact uploaded" links for the run's artifacts to *job*.

    Best-effort: when the artifact list cannot be fetched, *job* is
    returned unchanged.
    """
    try:
        artifacts = await fetchArtifacts(root_url, taskId, runId, session)
    except Exception:
        logger.debug("Artifacts could not be found for task: %s run: %s",
                     taskId, runId)
        return job

    seen = {}
    links = []
    for artifact in artifacts:
        name = os.path.basename(artifact["name"])
        # Bug 1595902 - It seems that directories are showing up as artifacts; skip them
        if not name:
            continue
        # Disambiguate repeated basenames with a counter suffix.
        occurrences = seen.setdefault(name, [])
        occurrences.append(artifact["name"])
        if len(occurrences) > 1:
            name = "{name} ({length})".format(name=name,
                                              length=len(occurrences) - 1)

        url = taskcluster_urls.api(
            root_url,
            "queue",
            "v1",
            "task/{taskId}/runs/{runId}/artifacts/{artifact_name}".format(
                taskId=taskId, runId=runId,
                artifact_name=artifact["name"]),
        )
        links.append({
            "label": "artifact uploaded",
            "linkText": name,
            "url": url,
        })

    job["jobInfo"]["links"] = links
    return job
Exemple #29
0
def get_purge_cache_url(provisioner_id, worker_type, use_proxy=False):
    """Return the purge-cache endpoint URL for the given worker pool."""
    template = liburls.api(get_root_url(use_proxy), "purge-cache", "v1",
                           "purge-cache/{}/{}")
    return template.format(provisioner_id, worker_type)
Exemple #30
0
def get_dependent_tasks_url(task_id, root_url=PRODUCTION_TASKCLUSTER_ROOT_URL):
    """Return the queue URL listing the tasks that depend on *task_id*."""
    route = f"task/{task_id}/dependents"
    return liburls.api(root_url, "queue", "v1", route)
Exemple #31
0
def get_task_url(task_id):
    """Return the production queue URL for task *task_id*."""
    route = f"task/{task_id}"
    return liburls.api(PRODUCTION_TASKCLUSTER_ROOT_URL, "queue", "v1", route)
Exemple #32
0
def get_current_scopes():
    """Get the current scopes.  This only makes sense in a task with the Taskcluster
    proxy enabled, where it returns the actual scopes accorded to the task."""
    url = liburls.api(get_root_url(True), "auth", "v1", "scopes/current")
    # Default to an empty list when the response carries no scopes key.
    return _do_request(url).json().get("scopes", [])