Example #1
0
async def get_props(context):
    """Fetch the ``properties`` mapping from the task's manifest.

    The manifest URL is derived from the task payload and checked against
    the artifact-url restrictions (scheme, netloc, path) before any
    request goes out.

    Args:
        context: the scriptworker context, providing ``task`` and ``config``.

    Returns:
        dict: the ``properties`` entry of the JSON manifest.
    """
    manifest_url = get_manifest_url(context.task['payload'])

    # Validate against a config copy that is guaranteed to carry
    # 'valid_artifact_task_ids' (defaulting to the task's dependencies).
    config_copy = deepcopy(context.config)
    config_copy.setdefault('valid_artifact_task_ids', context.task['dependencies'])
    validate_artifact_url(config_copy, manifest_url)

    manifest = await retry_request(context, manifest_url, method='get',
                                   return_type='json')
    return manifest['properties']
Example #2
0
async def download_artifacts(context,
                             file_urls,
                             parent_dir=None,
                             session=None,
                             download_func=download_file,
                             valid_artifact_task_ids=None):
    """Validate artifact URLs, then download them all in parallel.

    A ``taskId`` is valid for download if it is one of the task's
    dependencies or the ``taskGroupId`` (by convention the decision
    task's ``taskId``).

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        file_urls (list): artifact urls to download.
        parent_dir (str, optional): directory to download into; defaults
            to ``work_dir`` when None.  Default is None.
        session (aiohttp.ClientSession, optional): session used for the
            downloads; defaults to ``context.session`` when None.
            Default is None.
        download_func (function, optional): callable performing each
            download.  Default is ``download_file``.
        valid_artifact_task_ids (list, optional): task ids downloads are
            allowed from; when None, defaults to the task dependencies
            plus the decision taskId.  Defaults to None.

    Returns:
        list: the full paths of the downloaded files.

    Raises:
        scriptworker.exceptions.BaseDownloadError: on download failure
            after any applicable retries.

    """
    target_dir = parent_dir or context.config["work_dir"]
    http_session = session or context.session

    rules = context.config["valid_artifact_rules"]
    # XXX when chain of trust is on everywhere, hardcode the chain of trust task list
    allowed_task_ids = valid_artifact_task_ids or list(
        context.task["dependencies"] + [get_decision_task_id(context.task)])

    full_paths = []
    download_futures = []
    for url in file_urls:
        relative_path = validate_artifact_url(rules, allowed_task_ids, url)
        destination = os.path.join(target_dir, relative_path)
        # Refuse any path that would escape the download directory.
        assert_is_parent(destination, target_dir)
        full_paths.append(destination)
        download_futures.append(asyncio.ensure_future(retry_async(
            download_func,
            args=(context, url, destination),
            retry_exceptions=(DownloadError, aiohttp.ClientError,
                              asyncio.TimeoutError),
            kwargs={"session": http_session},
        )))

    await raise_future_exceptions(download_futures)
    return full_paths
async def download_artifacts(context, file_urls, parent_dir=None, session=None,
                             download_func=download_file, valid_artifact_task_ids=None):
    """Download artifacts in parallel after validating their URLs.

    Valid ``taskId``s for download include the task's dependencies and the
    ``taskGroupId``, which by convention is the ``taskId`` of the decision task.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        file_urls (list): the list of artifact urls to download.
        parent_dir (str, optional): the path of the directory to download the
            artifacts into.  If None, defaults to ``work_dir``.  Default is None.
        session (aiohttp.ClientSession, optional): the session to use to download.
            If None, defaults to context.session.  Default is None.
        download_func (function, optional): the function to call to download the files.
            default is ``download_file``.
        valid_artifact_task_ids (list, optional): the list of task ids that are
            valid to download from.  If None, defaults to all task dependencies
            plus the decision taskId.  Defaults to None.

    Returns:
        list: the full paths to the files downloaded

    Raises:
        scriptworker.exceptions.BaseDownloadError: on download failure after
            any applicable retries.

    """
    parent_dir = parent_dir or context.config['work_dir']
    session = session or context.session

    tasks = []
    files = []
    valid_artifact_rules = context.config['valid_artifact_rules']
    # XXX when chain of trust is on everywhere, hardcode the chain of trust task list
    valid_artifact_task_ids = valid_artifact_task_ids or list(
        context.task['dependencies'] + [get_decision_task_id(context.task)])
    for file_url in file_urls:
        rel_path = validate_artifact_url(valid_artifact_rules, valid_artifact_task_ids, file_url)
        abs_file_path = os.path.join(parent_dir, rel_path)
        # Path-traversal guard: the validated relative path must resolve to a
        # location inside ``parent_dir`` (same check as the sibling
        # implementation of this function).
        assert_is_parent(abs_file_path, parent_dir)
        files.append(abs_file_path)
        tasks.append(
            asyncio.ensure_future(
                retry_async(
                    download_func, args=(context, file_url, abs_file_path),
                    retry_exceptions=(DownloadError, aiohttp.ClientError, asyncio.TimeoutError),
                    kwargs={'session': session},
                )
            )
        )

    await raise_future_exceptions(tasks)
    return files
Example #4
0
async def move_beet(context, source, destinations, locale, update_balrog_manifest):
    """Download one artifact and re-upload it to the given destinations.

    The source URL is validated against the artifact-url restrictions
    first; the file is staged under ``work_dir`` between the download and
    the uploads.  When ``update_balrog_manifest`` is truthy, an enriched
    entry for this file is appended to ``context.balrog_manifest``.
    """
    config_copy = deepcopy(context.config)
    config_copy.setdefault('valid_artifact_task_ids', context.task['dependencies'])
    relative_path = validate_artifact_url(config_copy, source)
    local_path = os.path.join(context.config['work_dir'], relative_path)

    await retry_download(context=context, url=source, path=local_path)
    await retry_upload(context=context, destinations=destinations, path=local_path)

    if update_balrog_manifest:
        manifest_entry = enrich_balrog_manifest(context, local_path, locale, destinations)
        context.balrog_manifest.append(manifest_entry)
Example #5
0
async def download_artifacts(context,
                             file_urls,
                             parent_dir=None,
                             session=None,
                             download_func=download_file):
    """Validate artifact URLs, then download them all in parallel.

    A `taskId` is valid for download if it is one of the task's
    dependencies or the `taskGroupId` (by convention the decision task's
    `taskId`).

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        file_urls (list): artifact urls to download.
        parent_dir (str, optional): directory to download into; defaults
            to `work_dir` when None.  Default is None.
        session (aiohttp.ClientSession, optional): session used for the
            downloads; defaults to context.session when None.  Default
            is None.
        download_func (function, optional): callable performing each
            download.  Default is `download_file`.

    Returns:
        list: the downloaded files' paths relative to `parent_dir`.

    Raises:
        scriptworker.exceptions.DownloadError: on download failure after
            max retries.
    """
    target_dir = parent_dir or context.config['work_dir']
    http_session = session or context.session

    download_config = deepcopy(context.config)
    download_config.setdefault(
        'valid_artifact_task_ids',
        context.task['dependencies'] + [context.task['taskGroupId']])

    rel_paths = []
    futures = []
    for url in file_urls:
        rel_path = validate_artifact_url(download_config, url)
        rel_paths.append(rel_path)
        futures.append(asyncio.ensure_future(retry_async(
            download_func,
            args=(context, url, os.path.join(target_dir, rel_path)),
            kwargs={'session': http_session},
        )))

    await raise_future_exceptions(futures)
    return rel_paths
Example #6
0
def test_bad_artifact_url(valid_artifact_rules, valid_artifact_task_ids, url):
    """An invalid artifact URL must raise ScriptWorkerTaskException."""
    with pytest.raises(ScriptWorkerTaskException):
        client.validate_artifact_url(valid_artifact_rules, valid_artifact_task_ids, url)
Example #7
0
def test_artifact_url(valid_artifact_rules, valid_artifact_task_ids, url, expected):
    """A valid artifact URL validates and returns the parametrized expected value."""
    value = client.validate_artifact_url(valid_artifact_rules, valid_artifact_task_ids, url)
    assert value == expected
Example #8
0
def test_bad_artifact_url(params):
    """An invalid input pair must raise ScriptWorkerTaskException.

    ``params`` is a parametrized 2-tuple for the older two-argument
    ``validate_artifact_url`` API — presumably (config, url); confirm
    against the fixture that supplies it.
    """
    with pytest.raises(ScriptWorkerTaskException):
        client.validate_artifact_url(params[0], params[1])
Example #9
0
def test_artifact_url(params):
    """A valid input pair validates and returns the expected value.

    ``params`` is a parametrized 3-tuple for the older two-argument
    ``validate_artifact_url`` API — presumably (config, url, expected);
    confirm against the fixture that supplies it.
    """
    value = client.validate_artifact_url(params[0], params[1])
    assert value == params[2]
def test_bad_artifact_url(valid_artifact_rules, valid_artifact_task_ids, url):
    """An invalid artifact URL must raise ScriptWorkerTaskException."""
    with pytest.raises(ScriptWorkerTaskException):
        client.validate_artifact_url(valid_artifact_rules, valid_artifact_task_ids, url)
def test_artifact_url(valid_artifact_rules, valid_artifact_task_ids, url, expected):
    """A valid artifact URL validates and returns the parametrized expected value."""
    value = client.validate_artifact_url(valid_artifact_rules, valid_artifact_task_ids, url)
    assert value == expected