Example #1
async def async_main(args, signing_certs):
    tasks = []

    task = json.load(args.task_definition)
    # TODO: verify task["extra"]["funsize"]["partials"] with jsonschema
    for definition in task["extra"]["funsize"]["partials"]:
        workenv = WorkEnv(
            mar=definition.get('mar_binary'),
            mbsdiff=definition.get('mbsdiff_binary')
        )
        await workenv.setup()
        tasks.append(
            asyncio.ensure_future(
                retry_async(
                    manage_partial,
                    retry_exceptions=(aiohttp.ClientError, asyncio.TimeoutError),
                    kwargs=dict(
                        partial_def=definition,
                        filename_template=args.filename_template,
                        artifacts_dir=args.artifacts_dir,
                        work_env=workenv,
                        signing_certs=signing_certs,
                    ),
                )
            )
        )
    manifest = await asyncio.gather(*tasks)
    return manifest
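
Every example in this listing funnels work through retry_async, which the excerpts never define. As a rough mental model it is a retry loop with backoff for coroutines; a minimal sketch, assuming a signature consistent with the calls above (the attempts default and the backoff formula are assumptions, not the library's exact code):

import asyncio


async def retry_async(func, attempts=5, retry_exceptions=Exception,
                      args=(), kwargs=None, sleeptime_kwargs=None):
    """Await func(*args, **kwargs), retrying on retry_exceptions."""
    kwargs = kwargs or {}
    sleeptime_kwargs = sleeptime_kwargs or {}
    # delay_factor=0, as in the tests below, disables the sleep entirely.
    delay_factor = sleeptime_kwargs.get('delay_factor', 1)
    for attempt in range(1, attempts + 1):
        try:
            return await func(*args, **kwargs)
        except retry_exceptions:
            if attempt == attempts:
                raise  # out of attempts; surface the last failure
            await asyncio.sleep(delay_factor * 2 ** attempt)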
Example #2
def test_retry_async_fail_first(event_loop):
    global retry_count
    retry_count['fail_first'] = 0
    status = event_loop.run_until_complete(
        utils.retry_async(fail_first, sleeptime_kwargs={'delay_factor': 0}))
    assert status == "yay"
    assert retry_count['fail_first'] == 2
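
The test depends on module-level helpers that the excerpt omits. Hypothetical definitions consistent with the assertions (only the names retry_count and fail_first come from the test itself; the bodies are assumptions):

retry_count = {}


async def fail_first(*args, **kwargs):
    """Raise on the first call, return "yay" on the second."""
    retry_count['fail_first'] += 1
    if retry_count['fail_first'] < 2:
        raise Exception("first attempt fails")  # assumed exception type
    return "yay"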
Example #3
async def async_main(args, signing_certs):
    tasks = []

    allowed_url_prefixes = list(ALLOWED_URL_PREFIXES)
    if args.allow_staging_prefixes:
        allowed_url_prefixes += STAGING_URL_PREFIXES

    task = json.load(args.task_definition)
    # TODO: verify task["extra"]["funsize"]["partials"] with jsonschema
    for definition in task["extra"]["funsize"]["partials"]:
        tasks.append(
            asyncio.ensure_future(
                retry_async(
                    manage_partial,
                    retry_exceptions=(aiohttp.ClientError, asyncio.TimeoutError),
                    kwargs=dict(
                        partial_def=definition,
                        filename_template=args.filename_template,
                        artifacts_dir=args.artifacts_dir,
                        allowed_url_prefixes=allowed_url_prefixes,
                        signing_certs=signing_certs,
                        arch=args.arch,
                    ),
                )
            )
        )
    manifest = await asyncio.gather(*tasks)
    return manifest
Example #4
async def download_artifacts(context,
                             file_urls,
                             parent_dir=None,
                             session=None,
                             download_func=download_file,
                             valid_artifact_task_ids=None):
    """Download artifacts in parallel after validating their URLs.

    Valid ``taskId``s for download include the task's dependencies and the
    ``taskGroupId``, which by convention is the ``taskId`` of the decision task.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        file_urls (list): the list of artifact urls to download.
        parent_dir (str, optional): the path of the directory to download the
            artifacts into.  If None, defaults to ``work_dir``.  Default is None.
        session (aiohttp.ClientSession, optional): the session to use to download.
            If None, defaults to context.session.  Default is None.
        download_func (function, optional): the function to call to download the files.
            default is ``download_file``.
        valid_artifact_task_ids (list, optional): the list of task ids that are
            valid to download from.  If None, defaults to all task dependencies
            plus the decision taskId.  Defaults to None.

    Returns:
        list: the full paths to the files downloaded

    Raises:
        scriptworker.exceptions.BaseDownloadError: on download failure after
            any applicable retries.

    """
    parent_dir = parent_dir or context.config["work_dir"]
    session = session or context.session

    tasks = []
    files = []
    valid_artifact_rules = context.config["valid_artifact_rules"]
    # XXX when chain of trust is on everywhere, hardcode the chain of trust task list
    valid_artifact_task_ids = valid_artifact_task_ids or list(
        context.task["dependencies"] + [get_decision_task_id(context.task)])
    for file_url in file_urls:
        rel_path = validate_artifact_url(valid_artifact_rules,
                                         valid_artifact_task_ids, file_url)
        abs_file_path = os.path.join(parent_dir, rel_path)
        assert_is_parent(abs_file_path, parent_dir)
        files.append(abs_file_path)
        tasks.append(
            asyncio.ensure_future(
                retry_async(
                    download_func,
                    args=(context, file_url, abs_file_path),
                    retry_exceptions=(DownloadError, aiohttp.ClientError,
                                      asyncio.TimeoutError),
                    kwargs={"session": session},
                )))

    await raise_future_exceptions(tasks)
    return files
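
raise_future_exceptions is the counterpart of the ensure_future calls above: it waits for every scheduled download and re-raises the first failure rather than dropping it. A minimal sketch of such a helper, assuming only the behavior its call site implies (not the library's exact code):

import asyncio


async def raise_future_exceptions(tasks):
    """Wait for all tasks; re-raise the first stored exception, if any."""
    if not tasks:
        return
    await asyncio.wait(tasks)
    for task in tasks:
        if task.exception() is not None:
            raise task.exception()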
Example #5
def get_temp_creds_from_file(config):
    """Retry _get_temp_creds_from_file
    """
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(retry_async(
        _get_temp_creds_from_file, retry_exceptions=(ScriptWorkerTaskException,),
        args=(config, ),
    ))
Example #6
def test_retry_async_fail_first(event_loop):
    global retry_count
    retry_count['fail_first'] = 0
    status = event_loop.run_until_complete(
        utils.retry_async(fail_first)
    )
    assert status == "yay"
    assert retry_count['fail_first'] == 2
Example #7
def test_retry_async_always_fail(event_loop):
    global retry_count
    retry_count['always_fail'] = 0
    with mock.patch('asyncio.sleep', new=fake_sleep):
        with pytest.raises(ScriptWorkerException):
            # retry_async re-raises after the final attempt, so nothing
            # after run_until_complete would execute inside this block.
            event_loop.run_until_complete(
                utils.retry_async(always_fail,
                                  sleeptime_kwargs={'delay_factor': 0}))
    assert retry_count['always_fail'] == 5
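
As with fail_first earlier, always_fail and fake_sleep live elsewhere in the test module. Hypothetical versions that satisfy this test (the exception type and the signatures are assumptions):

from scriptworker.exceptions import ScriptWorkerException


async def always_fail(*args, **kwargs):
    """Fail on every call so retry_async exhausts all five attempts."""
    retry_count['always_fail'] += 1
    raise ScriptWorkerException("always fails")


async def fake_sleep(*args, **kwargs):
    """Stand-in for asyncio.sleep so the test never actually waits."""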
Example #8
def get_temp_creds_from_file(config):
    """Retry _get_temp_creds_from_file
    """
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(
        retry_async(
            _get_temp_creds_from_file,
            retry_exceptions=(ScriptWorkerTaskException, ),
            args=(config, ),
        ))
Example #9
async def download_artifacts(context, file_urls, parent_dir=None, session=None,
                             download_func=download_file, valid_artifact_task_ids=None):
    """Download artifacts in parallel after validating their URLs.

    Valid ``taskId``s for download include the task's dependencies and the
    ``taskGroupId``, which by convention is the ``taskId`` of the decision task.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        file_urls (list): the list of artifact urls to download.
        parent_dir (str, optional): the path of the directory to download the
            artifacts into.  If None, defaults to ``work_dir``.  Default is None.
        session (aiohttp.ClientSession, optional): the session to use to download.
            If None, defaults to context.session.  Default is None.
        download_func (function, optional): the function to call to download the files.
            default is ``download_file``.
        valid_artifact_task_ids (list, optional): the list of task ids that are
            valid to download from.  If None, defaults to all task dependencies
            plus the decision taskId.  Defaults to None.

    Returns:
        list: the full paths to the files downloaded

    Raises:
        scriptworker.exceptions.BaseDownloadError: on download failure after
            any applicable retries.

    """
    parent_dir = parent_dir or context.config['work_dir']
    session = session or context.session

    tasks = []
    files = []
    valid_artifact_rules = context.config['valid_artifact_rules']
    # XXX when chain of trust is on everywhere, hardcode the chain of trust task list
    valid_artifact_task_ids = valid_artifact_task_ids or list(context.task['dependencies'] + [get_decision_task_id(context.task)])
    for file_url in file_urls:
        rel_path = validate_artifact_url(valid_artifact_rules, valid_artifact_task_ids, file_url)
        abs_file_path = os.path.join(parent_dir, rel_path)
        files.append(abs_file_path)
        tasks.append(
            asyncio.ensure_future(
                retry_async(
                    download_func, args=(context, file_url, abs_file_path),
                    retry_exceptions=(DownloadError, aiohttp.ClientError, asyncio.TimeoutError),
                    kwargs={'session': session},
                )
            )
        )

    await raise_future_exceptions(tasks)
    return files
Example #10
async def download_artifacts(context,
                             file_urls,
                             parent_dir=None,
                             session=None,
                             download_func=download_file):
    """Download artifacts in parallel after validating their URLs.

    Valid `taskId`s for download include the task's dependencies and the
    `taskGroupId`, which by convention is the `taskId` of the decision task.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
        file_urls (list): the list of artifact urls to download.
        parent_dir (str, optional): the path of the directory to download the
            artifacts into.  If None, defaults to `work_dir`.  Default is None.
        session (aiohttp.ClientSession, optional): the session to use to download.
            If None, defaults to context.session.  Default is None.
        download_func (function, optional): the function to call to download the files.
            default is `download_file`.

    Returns:
        list: the relative paths to the files downloaded, relative to
            `parent_dir`.

    Raises:
        scriptworker.exceptions.DownloadError: on download failure after
            max retries.
    """
    parent_dir = parent_dir or context.config['work_dir']
    session = session or context.session

    tasks = []
    files = []
    download_config = deepcopy(context.config)
    download_config.setdefault(
        'valid_artifact_task_ids',
        context.task['dependencies'] + [context.task['taskGroupId']])
    for file_url in file_urls:
        rel_path = validate_artifact_url(download_config, file_url)
        abs_file_path = os.path.join(parent_dir, rel_path)
        files.append(rel_path)
        tasks.append(
            asyncio.ensure_future(
                retry_async(
                    download_func,
                    args=(context, file_url, abs_file_path),
                    kwargs={'session': session},
                )))

    await raise_future_exceptions(tasks)
    return files
Example #11
async def async_main(args, signing_cert):
    tasks = []

    allowed_url_prefixes = list(ALLOWED_URL_PREFIXES)
    if args.allow_staging_prefixes:
        allowed_url_prefixes += STAGING_URL_PREFIXES

    task = json.load(args.task_definition)

    downloads = await download_and_verify_mars(
        task["extra"]["funsize"]["partials"], allowed_url_prefixes, signing_cert
    )

    tools_dir = Path(tempfile.mkdtemp())
    await download_buildsystem_bits(
        partials_config=task["extra"]["funsize"]["partials"],
        downloads=downloads,
        tools_dir=tools_dir,
    )

    # May want to consider os.cpu_count() if we ever run on osx/win.
    # sched_getaffinity is the list of cores we can run on, not the total.
    semaphore = asyncio.Semaphore(len(os.sched_getaffinity(0)))
    for definition in task["extra"]["funsize"]["partials"]:
        tasks.append(
            asyncio.ensure_future(
                retry_async(
                    manage_partial,
                    retry_exceptions=(aiohttp.ClientError, asyncio.TimeoutError),
                    kwargs=dict(
                        partial_def=definition,
                        artifacts_dir=args.artifacts_dir,
                        tools_dir=tools_dir,
                        arch=args.arch,
                        downloads=downloads,
                        semaphore=semaphore,
                    ),
                )
            )
        )
    manifest = await asyncio.gather(*tasks)

    for url in downloads:
        downloads[url]["download_path"].unlink()
        shutil.rmtree(downloads[url]["extracted_path"])
    shutil.rmtree(tools_dir)

    return manifest
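
The semaphore caps concurrent partial generation at the number of cores the process may run on. Presumably manage_partial brackets its heavy work with it; a hypothetical sketch showing only the concurrency pattern (the body is not the real implementation):

async def manage_partial(partial_def, artifacts_dir, tools_dir, arch,
                         downloads, semaphore):
    async with semaphore:  # at most one partial per available core
        # ... generate the partial MAR and return its manifest entry ...
        return {"arch": arch, "artifacts_dir": str(artifacts_dir)}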
Example #12
async def download_all_zip_artifacts(upstream_zip_definitions, session):
    async_tasks = tuple(
        asyncio.ensure_future(
            retry_async(download_file,
                        kwargs={
                            'context': None,
                            'url': definition['url'],
                            'abs_filename': definition['abs_filename'],
                            'session': session,
                        },
                        retry_exceptions=(DownloadError, aiohttp.ClientError,
                                          asyncio.TimeoutError)))
        for definition in upstream_zip_definitions
    )

    await raise_future_exceptions(async_tasks)
Example #13
def download_artifacts(context, file_urls, parent_dir=None, session=None,
                       download_func=download_file):
    parent_dir = parent_dir or context.config['work_dir']
    session = session or context.session

    tasks = []
    files = []
    for file_url in file_urls:
        rel_path = file_url.rsplit('/', 1)[-1]
        abs_file_path = os.path.join(parent_dir, rel_path)
        files.append(abs_file_path)
        tasks.append(
            asyncio.ensure_future(
                retry_async(
                    download_func, args=(context, file_url, abs_file_path),
                    kwargs={'session': session},
                )
            )
        )

    return tasks, files
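
Unlike the later revisions above, this early variant returns the still-pending tasks instead of awaiting them, so the caller must drive them itself. A hedged usage sketch (fetch_all is a hypothetical caller, not part of the source):

import asyncio


async def fetch_all(context, file_urls):
    tasks, files = download_artifacts(context, file_urls)
    # gather() waits for every download and propagates the first error.
    await asyncio.gather(*tasks)
    return files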