Example #1
async def create_artifact(context,
                          path,
                          target_path,
                          content_type,
                          content_encoding,
                          storage_type='s3',
                          expires=None):
    """Create an artifact and upload it.

    This should support s3 and azure out of the box; we'll need some tweaking
    if we want to support redirect/error artifacts.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        path (str): the path of the file to upload.
        target_path (str): the artifact path to upload to, i.e. the artifact
            name passed to createArtifact.
        content_type (str): Content type (MIME type) of the artifact. Values can be found via
            scriptworker.artifacts.guess_content_type_and_encoding()
        content_encoding (str): Encoding (per the mimetypes library) of the artifact. None means no encoding. Values can
            be found via scriptworker.artifacts.guess_content_type_and_encoding()
        storage_type (str, optional): the taskcluster storage type to use.
            Defaults to 's3'
        expires (str, optional): datestring of when the artifact expires.
            Defaults to None.

    Raises:
        ScriptWorkerRetryException: on failure.

    """
    payload = {
        "storageType": storage_type,
        "expires": expires or get_expiration_arrow(context).isoformat(),
        "contentType": content_type,
    }
    args = [
        get_task_id(context.claim_task),
        get_run_id(context.claim_task),
        target_path,
        payload,
    ]

    tc_response = await context.temp_queue.createArtifact(*args)
    skip_auto_headers = [aiohttp.hdrs.CONTENT_TYPE]
    loggable_url = get_loggable_url(tc_response['putUrl'])
    log.info("uploading {path} to {url}...".format(path=path,
                                                   url=loggable_url))
    with open(path, "rb") as fh:
        async with async_timeout.timeout(
                context.config['artifact_upload_timeout']):
            async with context.session.put(tc_response['putUrl'],
                                           data=fh,
                                           headers=_craft_artifact_put_headers(
                                               content_type, content_encoding),
                                           skip_auto_headers=skip_auto_headers,
                                           compress=False) as resp:
                log.info("create_artifact {}: {}".format(path, resp.status))
                response_text = await resp.text()
                log.info(response_text)
                if resp.status not in (200, 204):
                    raise ScriptWorkerRetryException(
                        "Bad status {}".format(resp.status))
Example #2
async def create_artifact(context, path, target_path, content_type, content_encoding, storage_type='s3', expires=None):
    """Create an artifact and upload it.

    This should support s3 and azure out of the box; we'll need some tweaking
    if we want to support redirect/error artifacts.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        path (str): the path of the file to upload.
        target_path (str): the artifact path to upload to, i.e. the artifact
            name passed to createArtifact.
        content_type (str): Content type (MIME type) of the artifact. Values can be found via
            scriptworker.artifacts.guess_content_type_and_encoding()
        content_encoding (str): Encoding (per the mimetypes library) of the artifact. None means no encoding. Values can
            be found via scriptworker.artifacts.guess_content_type_and_encoding()
        storage_type (str, optional): the taskcluster storage type to use.
            Defaults to 's3'
        expires (str, optional): datestring of when the artifact expires.
            Defaults to None.

    Raises:
        ScriptWorkerRetryException: on failure.

    """
    payload = {
        "storageType": storage_type,
        "expires": expires or get_expiration_arrow(context).isoformat(),
        "contentType": content_type,
    }
    args = [get_task_id(context.claim_task), get_run_id(context.claim_task),
            target_path, payload]

    tc_response = await context.temp_queue.createArtifact(*args)
    skip_auto_headers = [aiohttp.hdrs.CONTENT_TYPE]
    loggable_url = get_loggable_url(tc_response['putUrl'])
    log.info("uploading {path} to {url}...".format(path=path, url=loggable_url))
    with open(path, "rb") as fh:
        async with async_timeout.timeout(context.config['artifact_upload_timeout']):
            async with context.session.put(
                tc_response['putUrl'], data=fh, headers=_craft_artifact_put_headers(content_type, content_encoding),
                skip_auto_headers=skip_auto_headers, compress=False
            ) as resp:
                log.info("create_artifact {}: {}".format(path, resp.status))
                response_text = await resp.text()
                log.info(response_text)
                if resp.status not in (200, 204):
                    raise ScriptWorkerRetryException(
                        "Bad status {}".format(resp.status),
                    )
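Because create_artifact raises ScriptWorkerRetryException to signal a retryable failure, callers typically wrap it in retry logic. A minimal retry-loop sketch (the attempt count and backoff below are hypothetical choices, not scriptworker defaults):

import asyncio

async def create_artifact_with_retries(context, path, target_path,
                                       content_type, content_encoding,
                                       attempts=3):
    # attempts=3 and the linear backoff below are hypothetical choices.
    for attempt in range(1, attempts + 1):
        try:
            return await create_artifact(context, path, target_path,
                                         content_type, content_encoding)
        except ScriptWorkerRetryException:
            if attempt == attempts:
                raise
            await asyncio.sleep(attempt * 5)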
Example #3
def test_get_loggable_url(url, expected):
    assert utils.get_loggable_url(url) == expected
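The test takes url and expected as arguments, which suggests it is driven by pytest.mark.parametrize; the decorator and its data are not part of this excerpt. A sketch with hypothetical data, assuming utils is scriptworker's utils module as the reference in the test implies (the real URL pairs and the exact redaction behaviour of get_loggable_url are not shown here):

import pytest
from scriptworker import utils

# The url/expected pair below is a hypothetical placeholder, not the
# project's actual test data.
@pytest.mark.parametrize("url,expected", [
    ("https://example.com/some/artifact", "https://example.com/some/artifact"),
])
def test_get_loggable_url(url, expected):
    assert utils.get_loggable_url(url) == expected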