Example #1
def fetch(opts):
    """
    support fetching from scp sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    site = opts.site
    work_dir = opts.work_dir

    if not SCP.exists():
        err('unable to fetch package; scp is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    if not SCP.execute(['-o', 'BatchMode yes', site, cache_file],
                       cwd=work_dir):
        err('unable to secure-copy file from target')
        return None
    log('successfully secure-copied file from target')

    return cache_file
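
The ``SCP`` wrapper and the logging helpers (``err``, ``note``, ``log``) above
are project-internal. As a rough, standard-library-only sketch of the same
invocation (the function name here is illustrative):

import subprocess

def scp_fetch(site, cache_file, work_dir):
    # "-o BatchMode yes" keeps scp from prompting for credentials, which
    # would otherwise stall a non-interactive fetch
    result = subprocess.run(
        ['scp', '-o', 'BatchMode yes', site, cache_file],
        cwd=work_dir)
    return cache_file if result.returncode == 0 else None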
Example #2
def _validate_cache(cache_dir):
    """
    validate an existing cache directory to fetch on

    A fetch operation may occur on an existing cache directory, typically when
    a force-fetch or a configured revision has changed. This call helps
    validate the existing cache directory (from a bad state such as a corrupted
    repository). If an existing cache directory is detected to be in a bad
    state, it will be removed so that the repository can be re-fetched.

    Args:
        cache_dir: the cache/bare repository to fetch into

    Returns:
        a 2-tuple (if a cache directory exists; and if validation failed)
    """

    git_dir = '--git-dir=' + cache_dir

    bad_validation = False
    has_cache = False
    if os.path.isdir(cache_dir):
        log('cache directory detected; validating')
        if GIT.execute([git_dir, 'rev-parse'], cwd=cache_dir, quiet=True):
            debug('cache directory validated')
            has_cache = True
        else:
            log('cache directory has errors; will re-download')
            if not path_remove(cache_dir):
                err(
                    'unable to cleanup cache folder for package\n'
                    ' (cache folder: {})', cache_dir)
                bad_validation = True

    return has_cache, bad_validation
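
``GIT.execute`` wraps a git invocation; a standalone approximation of the
validation probe used above (assuming git is on the PATH) could look like:

import os
import subprocess

def bare_repo_is_valid(cache_dir):
    # `git rev-parse` exits non-zero when the directory is not a usable
    # repository, which is the signal the validation above keys off of
    if not os.path.isdir(cache_dir):
        return False
    result = subprocess.run(
        ['git', '--git-dir=' + cache_dir, 'rev-parse'],
        cwd=cache_dir,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL)
    return result.returncode == 0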
Example #3
def extract(opts):
    """
    support extraction (checkout) of a mercurial cache into a build directory

    With provided extraction options (``RelengExtractOptions``), the extraction
    stage will be processed. A Mercurial extraction process will populate a
    working tree based off the cached Mercurial repository acquired from the
    fetch stage.

    Args:
        opts: the extraction options

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """

    assert opts
    cache_dir = opts.cache_dir
    revision = opts.revision
    work_dir = opts.work_dir

    if not HG.exists():
        err('unable to extract package; mercurial (hg) is not installed')
        return False

    log('checking out target revision into work tree')
    if not HG.execute(
        ['--verbose', 'clone', '--rev', revision, cache_dir, work_dir],
            cwd=work_dir):
        err('unable to checkout revision')
        return False

    return True
Example #4
def _fetch_submodule(opts, name, cache_dir, revision, site):
    """
    fetch a submodule into a provided cache/bare repository

    Fetches an individual submodule into the provided cache directory. The
    origin of the submodule is provided via the ``site`` argument. A revision,
    if provided, can be used to help verify the target revision desired for a
    submodule; however, it is not required (e.g. when a repository does not set
    an explicit submodule revision).

    Args:
        opts: fetch options
        name: the name of the submodule (for state messages)
        cache_dir: the cache/bare repository to fetch into
        revision: the revision (branch, tag, hash) to fetch
        site: the site to fetch the submodule from

    Returns:
        ``True`` if the submodule has been fetched; ``False`` otherwise
    """
    git_dir = '--git-dir=' + cache_dir

    # check if we have the target revision cached; if so, submodule is ready
    if os.path.isdir(cache_dir) and not opts.ignore_cache:
        if not revision:
            return _sync_git_origin(cache_dir, site)

        if revision_exists(git_dir, revision) in REVISION_EXISTS:
            return _sync_git_origin(cache_dir, site)

    log('processing submodule (package: {}) {}...', opts.name, name)
    sys.stdout.flush()

    # validate any cache directory (if one exists)
    has_cache, bad_validation = _validate_cache(cache_dir)
    if bad_validation:
        return False

    # if we have no cache for this repository, build one
    if not has_cache:
        if not ensure_dir_exists(cache_dir):
            return False

        if not _create_bare_git_repo(cache_dir):
            return False

    # ensure configuration is properly synchronized
    if not _sync_git_origin(cache_dir, site):
        return False

    # fetch sources for this submodule
    desc = 'submodule ({}): {}'.format(opts.name, name)
    return _fetch_srcs(opts, cache_dir, revision, desc=desc)
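
Several examples call an ``ensure_dir_exists`` helper whose definition is not
shown. A plausible minimal implementation, offered as an assumption rather
than the project's actual code:

import os

def ensure_dir_exists(dir_):
    # exist_ok avoids a race between checking for and creating the directory
    try:
        os.makedirs(dir_, exist_ok=True)
        return True
    except OSError:
        return False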
Example #5
def fetch(opts):
    """
    support fetching from svn sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site
    work_dir = opts.work_dir

    if not SVN.exists():
        err('unable to fetch package; svn is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    log('checking out sources')
    if not SVN.execute(['checkout', '-r', revision, site, work_dir],
                       cwd=work_dir):
        err('unable to checkout module')
        return None

    log('caching sources')

    def svn_filter(info):
        if info.name.endswith('.svn'):
            return None
        return info

    # ensure cache file's directory exists
    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    with tarfile.open(cache_file, 'w:gz') as tar:
        tar.add(work_dir, arcname=name, filter=svn_filter)

    return cache_file
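
The ``filter`` callable used here replaces the ``exclude`` parameter that
older ``tarfile`` releases accepted (it has since been removed); returning
``None`` drops an entry and, for directories, skips recursion into them. A
self-contained demonstration:

import os
import tarfile
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    os.makedirs(os.path.join(tmp, 'src', '.svn'))
    open(os.path.join(tmp, 'src', 'main.c'), 'w').close()

    def svn_filter(info):
        return None if info.name.endswith('.svn') else info

    archive = os.path.join(tmp, 'out.tgz')
    with tarfile.open(archive, 'w:gz') as tar:
        tar.add(os.path.join(tmp, 'src'), arcname='src', filter=svn_filter)

    with tarfile.open(archive) as tar:
        print(tar.getnames())  # ['src', 'src/main.c'] -- no .svn entries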
Example #6
    def validate(self, asc, target):
        """
        validate ascii-armored file against a target

        Accepting an ASCII-armored file, use gpg to validate the public key
        against the provided target.

        Args:
            asc: the asc file
            target: the target file

        Returns:
            ``True`` if the target has been validated; ``False`` otherwise
        """

        rv, out = self.execute_rv('--verify', asc, target)
        if rv == 0:
            verbose('validated: {}', asc)
        elif out:
            log(out)

        return rv == 0
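
``execute_rv`` is a project wrapper returning an exit code and captured
output; a minimal sketch of the same check using the ``gpg`` command line
directly (the function name is illustrative):

import subprocess

def gpg_verify(asc, target):
    # gpg exits with 0 only when the signature verifies against the target
    result = subprocess.run(
        ['gpg', '--verify', asc, target],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    if result.returncode != 0 and result.stdout:
        print(result.stdout.decode('utf-8', 'replace'))
    return result.returncode == 0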
Example #7
def _verify_revision(git_dir, revision, quiet=False):
    """
    verify the gpg signature for a target revision

    The GPG signature for a provided revision (tag or commit) will be checked
    to validate the revision.

    Args:
        git_dir: the Git directory
        revision: the revision to verify
        quiet (optional): whether or not to log that verification is happening

    Returns:
        ``True`` if the revision is signed; ``False`` otherwise
    """

    if not quiet:
        log('verifying the gpg signature on the target revision')
    else:
        verbose('verifying the gpg signature on the target revision')

    if GIT.execute(
        [git_dir, 'rev-parse', '--quiet', '--verify', revision + '^{tag}'],
            quiet=True):
        verified_cmd = 'verify-tag'
    else:
        verified_cmd = 'verify-commit'

        # acquire the commit id (if not already set), to ensure we can verify
        # against commits or branches
        rv, revision = GIT.execute_rv(git_dir, 'rev-parse', revision)
        if rv != 0:
            verbose('failed to determine the commit id for a revision')
            return False

    return GIT.execute([git_dir, verified_cmd, revision], quiet=quiet)
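
The branch between ``verify-tag`` and ``verify-commit`` hinges on whether the
revision peels to an annotated tag. A standalone sketch of that probe, with
``git_dir`` being a ``--git-dir=...`` argument as in the example:

import subprocess

def is_annotated_tag(git_dir, revision):
    # the ^{tag} peel only succeeds for annotated tag objects; lightweight
    # tags and plain commits fail, steering the caller to `verify-commit`
    result = subprocess.run(
        ['git', git_dir, 'rev-parse', '--quiet', '--verify',
         revision + '^{tag}'],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL)
    return result.returncode == 0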
Example #8
def fetch(opts):
    """
    support fetching from cvs sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site
    work_dir = opts.work_dir

    cache_basename = os.path.basename(cache_file)
    cache_stem, __ = interpret_stem_extension(cache_basename)

    if not CVS.exists():
        err('unable to fetch package; cvs is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    try:
        cvsroot, module = site.rsplit(' ', 1)
    except ValueError:
        err('''\
improper cvs site defined

The provided CVS site does not define both the CVSROOT as well as the target
module to checkout. For example:

    :pserver:[email protected]:/var/lib/cvsroot mymodule

 Site: {}''', site)
        return None

    log('checking out sources')
    if not CVS.execute(['-d', cvsroot, 'checkout', '-d', cache_stem,
            '-r', revision, module], cwd=work_dir):
        err('unable to checkout module')
        return None

    cvs_module_dir = os.path.join(work_dir, cache_stem)
    if not os.path.exists(cvs_module_dir):
        err('no sources available for the provided revision')
        return None

    log('caching sources')
    def cvs_filter(info):
        if info.name.endswith('CVS'):
            return None
        return info

    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    with tarfile.open(cache_file, 'w:gz') as tar:
        tar.add(cvs_module_dir, arcname=cache_stem, filter=cvs_filter)

    return cache_file
Example #9
def initialize_sample(opts):
    """
    initialize a sample project

    Generates a sample project in the root directory to help new users or new
    projects get started.

    Args:
        opts: options for this run

    Returns:
        ``True`` if the sample project could be initialized; ``False`` if an
        issue has occurred generating the sample project
    """

    root_dir = opts.root_dir

    if not ensure_dir_exists(root_dir):
        return False

    if os.listdir(root_dir):
        err('unable to initialize sample project in a non-empty directory')
        return False

    sample_dir = os.path.join(root_dir, 'package', 'sample')

    success = True
    if ensure_dir_exists(sample_dir):
        # sample project
        sample_defs = os.path.join(root_dir, 'package', 'sample', 'sample')
        try:
            with open(sample_defs, 'w') as f:
                f.write('''\
#!/usr/bin/env python
# -*- coding: utf-8 -*-

SAMPLE_DEPENDENCIES = []
SAMPLE_LICENSE = ['<license name>']
SAMPLE_LICENSE_FILES = ['<license file>']
SAMPLE_SITE = '<location for sources>'
SAMPLE_TYPE = '<package-type>'
SAMPLE_VERSION = '<package-version>'
''')

            verbose('written sample file')
        except IOError as e:
            err('unable to generate a sample file')
            verbose(str(e))
            success = False
    else:
        success = False

    # .gitignore
    try:
        project_gitignore = os.path.join(root_dir,
                                         '.gitignore')  # (assumption)
        with open(project_gitignore, 'w') as f:
            f.write('''\
# releng-tool
/cache/
/dl/
/output/
.releng-flag-*
''')

        verbose('written .gitignore file')
    except IOError as e:
        err('unable to generate a .gitignore file')
        verbose(str(e))
        success = False

    # releng project
    try:
        project_defs = os.path.join(root_dir, 'releng')
        with open(project_defs, 'w') as f:
            f.write('''\
#!/usr/bin/env python
# -*- coding: utf-8 -*-

packages = [
    'sample',
]
''')

        verbose('written releng file')
    except IOError as e:
        err('unable to generate a releng file')
        verbose(str(e))
        success = False

    if success:
        log('initialized empty releng-tool project')
    else:
        warn('partially initialized a releng-tool project')
    return success
Example #10
def fetch(opts):
    """
    support fetching from mercurial sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the cache directory if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_dir = opts.cache_dir
    name = opts.name
    revision = opts.revision
    site = opts.site

    if not HG.exists():
        err('unable to fetch package; hg (mercurial) is not installed')
        return None

    hg_dir = ['--repository', cache_dir]

    # check if we have the target revision; if so, full stop
    if os.path.isdir(cache_dir) and not opts.ignore_cache:
        if HG.execute(hg_dir + ['--quiet', 'log', '--rev', revision],
                      cwd=cache_dir,
                      quiet=True):
            return cache_dir

    note('fetching {}...', name)
    sys.stdout.flush()

    # if we have no cache for this repository, build one
    if not os.path.isdir(cache_dir):
        if not ensure_dir_exists(cache_dir):
            return None

        if not HG.execute([
                '--noninteractive', '--verbose', 'clone', '--noupdate', site,
                cache_dir
        ],
                          cwd=cache_dir):
            err('unable to clone mercurial repository')
            return None

    log('fetching most recent sources')
    if not HG.execute(hg_dir + ['--noninteractive', '--verbose', 'pull'],
                      cwd=cache_dir):
        err('unable to fetch from remote repository')
        return None

    log('verifying target revision exists')
    if not HG.execute(hg_dir + ['--quiet', 'log', '--rev', revision],
                      cwd=cache_dir,
                      quiet=True):
        err(
            'unable to find matching revision in repository: {}\n'
            ' (revision: {})', name, revision)
        return None

    return cache_dir
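
Both the cache check and the final verification rely on ``hg log --rev``
failing for unknown revisions; a standalone equivalent (function name
illustrative):

import subprocess

def hg_has_revision(cache_dir, revision):
    # `hg log --rev` aborts with a non-zero exit code when the revision is
    # not known to the repository
    result = subprocess.run(
        ['hg', '--repository', cache_dir, '--quiet', 'log',
         '--rev', revision],
        cwd=cache_dir,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL)
    return result.returncode == 0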
Example #11
def _fetch_srcs(opts, cache_dir, revision, desc=None, refspecs=None):
    """
    invokes a git fetch call of the configured origin into a bare repository

    With a provided cache directory (``cache_dir``; bare repository), fetch the
    contents of a configured origin into the directory. The fetch call will
    use a restricted depth, unless configured otherwise. In the event a target
    revision cannot be found (if provided), an unshallow fetch will be made.

    This call may be invoked without a revision provided -- specifically, this
    can occur for submodule configurations which do not have a specific revision
    explicitly set.

    Args:
        opts: fetch options
        cache_dir: the bare repository to fetch into
        revision: expected revision desired from the repository
        desc (optional): description to use for error message
        refspecs (optional): additional refspecs to add to the fetch call

    Returns:
        ``True`` if the fetch was successful; ``False`` otherwise
    """

    git_dir = '--git-dir=' + cache_dir

    if not desc:
        desc = 'repository: {}'.format(opts.name)

    log('fetching most recent sources')
    prepared_fetch_cmd = [
        git_dir,
        'fetch',
        '--progress',
        '--prune',
        'origin',
    ]

    # limit fetch depth
    target_depth = 1
    if opts._git_depth is not None:
        target_depth = opts._git_depth
    limited_fetch = (target_depth
                     and 'releng.git.no_depth' not in opts._quirks)

    depth_cmds = [
        '--depth',
        str(target_depth),
    ]

    # if a revision is provided, first attempt to do a revision-specific fetch
    quick_fetch = 'releng.git.no_quick_fetch' not in opts._quirks
    if revision and quick_fetch:
        ls_cmd = [
            'ls-remote',
            '--exit-code',
            'origin',
        ]
        debug('checking if tag exists on remote')
        if GIT.execute(ls_cmd + ['--tags', 'refs/tags/{}'.format(revision)],
                       cwd=cache_dir,
                       quiet=True):
            debug('attempting a tag reference fetch operation')
            fetch_cmd = list(prepared_fetch_cmd)
            fetch_cmd.append('+refs/tags/{0}:refs/tags/{0}'.format(revision))
            if limited_fetch:
                fetch_cmd.extend(depth_cmds)

            if GIT.execute(fetch_cmd, cwd=cache_dir):
                debug('found the reference')
                return True

        debug('checking if reference exists on remote')
        if GIT.execute(ls_cmd + ['--heads', 'refs/heads/{}'.format(revision)],
                       cwd=cache_dir,
                       quiet=True):
            debug('attempting a head reference fetch operation')
            fetch_cmd = list(prepared_fetch_cmd)
            fetch_cmd.append(
                '+refs/heads/{0}:refs/remotes/origin/{0}'.format(revision))
            if limited_fetch:
                fetch_cmd.extend(depth_cmds)

            if GIT.execute(fetch_cmd, cwd=cache_dir):
                debug('found the reference')
                return True

    # fetch standard (and configured) refspecs
    std_refspecs = [
        '+refs/heads/*:refs/remotes/origin/*',
        '+refs/tags/*:refs/tags/*',
    ]
    prepared_fetch_cmd.extend(std_refspecs)

    # allow fetching additional references if configured (e.g. pull requests)
    if refspecs:
        for ref in refspecs:
            prepared_fetch_cmd.append(
                '+refs/{0}:refs/remotes/origin/{0}'.format(ref))

    fetch_cmd = list(prepared_fetch_cmd)
    if limited_fetch:
        fetch_cmd.extend(depth_cmds)

    if not GIT.execute(fetch_cmd, cwd=cache_dir):
        err('unable to fetch branches/tags from remote repository')
        return False

    if revision:
        verbose('verifying target revision exists')
        exists_state = revision_exists(git_dir, revision)
        if exists_state in REVISION_EXISTS:
            pass
        elif (exists_state == GitExistsType.MISSING_HASH and limited_fetch
              and opts._git_depth is None):
            warn('failed to find hash on depth-limited fetch; fetching all...')

            fetch_cmd = list(prepared_fetch_cmd)
            fetch_cmd.append('--unshallow')

            if not GIT.execute(fetch_cmd, cwd=cache_dir):
                err('unable to unshallow fetch state')
                return False

            if revision_exists(git_dir, revision) not in REVISION_EXISTS:
                err(
                    'unable to find matching revision in {}\n'
                    ' (revision: {})', desc, revision)
                return False
        else:
            err('unable to find matching revision in {}\n'
                ' (revision: {})', desc, revision)
            return False

    return True
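
The quick-fetch path probes the remote before committing to a fetch; a
condensed sketch of the tag variant (assumptions: git on the PATH and a
configured ``origin``):

import subprocess

def quick_fetch_tag(cache_dir, revision, depth=1):
    git_dir = '--git-dir=' + cache_dir

    # `ls-remote --exit-code` exits non-zero when no ref matches, so the
    # expensive fetch is only attempted for revisions the remote knows
    probe = subprocess.run(
        ['git', git_dir, 'ls-remote', '--exit-code', '--tags', 'origin',
         'refs/tags/{}'.format(revision)],
        stdout=subprocess.DEVNULL)
    if probe.returncode != 0:
        return False

    # fetch just the one tag, depth-limited, mirroring the example's
    # revision-specific fetch before its full-refspec fallback
    fetch = subprocess.run(
        ['git', git_dir, 'fetch', '--progress', '--prune', 'origin',
         '+refs/tags/{0}:refs/tags/{0}'.format(revision),
         '--depth', str(depth)])
    return fetch.returncode == 0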
Example #12
def fetch(opts):
    """
    support fetching from rsync sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    site = opts.site
    work_dir = opts.work_dir

    cache_basename = os.path.basename(cache_file)
    cache_stem, __ = interpret_stem_extension(cache_basename)

    if not RSYNC.exists():
        err('unable to fetch package; rsync is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    # options
    fetch_opts = {
        '--recursive': '',  # default recursive call
    }
    if opts.extra_opts:
        fetch_opts.update(expand(opts.extra_opts))

    # argument building
    fetch_args = []
    fetch_args.extend(prepare_arguments(fetch_opts))

    # sanity check provided arguments
    for fetch_arg in fetch_args:
        if '--remove-source-files' in fetch_arg:
            err('option `--remove-source-files` not permitted')
            return None
        elif not fetch_arg.startswith('-'):
            err('invalid fetch option provided: {}', fetch_arg)
            return None

    fetch_args.append(site)  # source directory
    fetch_args.append(work_dir)  # destination directory

    if not RSYNC.execute(fetch_args, cwd=work_dir):
        err('unable to rsync from source')
        return None
    log('successfully invoked rsync for source')

    with tarfile.open(cache_file, 'w:gz') as tar:
        tar.add(work_dir, arcname=cache_stem)

    return cache_file
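
``prepare_arguments`` and ``expand`` are helpers not shown here. A plausible
reading of how the option table becomes an argument list, offered as an
assumption about the helper's behavior:

def prepare_arguments(opts):
    # an empty value yields a bare flag; a non-empty value yields the
    # option followed by its value
    args = []
    for key, value in opts.items():
        args.append(key)
        if value:
            args.append(value)
    return args

print(prepare_arguments({'--recursive': ''}))  # ['--recursive']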
Example #13
def _process_submodules(opts, work_dir):
    """
    process submodules for an extracted repository

    After extracting a repository to a working tree, this call can be used to
    extract any tracked submodules configured on the repository. The
    ``.gitmodules`` file is parsed for submodules and caches will be populated
    for each submodule. This call is recursive.

    Args:
        opts: the extraction options
        work_dir: the working directory to look for submodules

    Returns:
        ``True`` if submodules have been processed; ``False`` otherwise
    """

    git_modules_file = os.path.join(work_dir, '.gitmodules')
    if not os.path.exists(git_modules_file):
        return True

    debug('parsing git submodules file: {}', git_modules_file)
    cfg = GIT.parse_cfg_file(git_modules_file)
    if not cfg:
        err('failed to parse git submodules file')
        return False

    for sec_name in cfg.sections():
        if not sec_name.startswith('submodule'):
            continue

        if not cfg.has_option(sec_name, 'path') or \
                not cfg.has_option(sec_name, 'url'):
            debug('submodule section missing path/url')
            continue

        submodule_path = cfg.get(sec_name, 'path')
        submodule_revision = None
        if cfg.has_option(sec_name, 'branch'):
            submodule_revision = cfg.get(sec_name, 'branch')
        submodule_url = cfg.get(sec_name, 'url')
        log('extracting submodule ({}): {}', opts.name, submodule_path)
        debug('submodule revision: {}',
              submodule_revision if submodule_revision else '(none)')

        ckey = pkg_cache_key(submodule_url)
        root_cache_dir = os.path.abspath(
            os.path.join(opts.cache_dir, os.pardir))
        sm_cache_dir = os.path.join(root_cache_dir, ckey)

        postfix_path = os.path.split(submodule_path)
        sm_work_dir = os.path.join(work_dir, *postfix_path)

        if not _workdir_extract(sm_cache_dir, sm_work_dir, submodule_revision):
            return False

        # process nested submodules
        if not _process_submodules(opts, sm_work_dir):
            return False

    return True
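
``GIT.parse_cfg_file`` is project-internal; since ``.gitmodules`` follows
git's own configuration syntax, one alternative sketch is to let git flatten
the file (assuming git is available):

import subprocess

def list_submodules(work_dir):
    # `git config --file .gitmodules --list` emits flattened
    # `submodule.<name>.<key>=<value>` lines
    result = subprocess.run(
        ['git', 'config', '--file', '.gitmodules', '--list'],
        cwd=work_dir,
        stdout=subprocess.PIPE)
    submodules = {}
    for line in result.stdout.decode('utf-8').splitlines():
        key, _, value = line.partition('=')
        if not key.startswith('submodule.'):
            continue
        prefix, _, prop = key.rpartition('.')
        name = prefix[len('submodule.'):]
        submodules.setdefault(name, {})[prop] = value
    return submodules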
Example #14
def _workdir_extract(cache_dir, work_dir, revision):
    """
    extract a provided revision from a cache (bare) repository to a work tree

    Using a provided bare repository (``cache_dir``) and a working tree
    (``work_dir``), extract the contents of the repository using the provided
    ``revision`` value. This call will force the working directory to match the
    target revision. In the case where the work tree is diverged, the contents
    will be replaced with the origin's revision.

    Args:
        cache_dir: the cache repository
        work_dir: the working directory
        revision: the revision

    Returns:
        ``True`` if the extraction has succeeded; ``False`` otherwise
    """

    git_dir = '--git-dir=' + cache_dir
    work_tree = '--work-tree=' + work_dir

    # if a revision is not provided, extract the HEAD from the cache
    if not revision:
        revision = GIT.extract_submodule_revision(cache_dir)
        if not revision:
            return False

    log('checking out target revision into work tree')
    if not GIT.execute([
            git_dir, work_tree, '-c', 'advice.detachedHead=false', 'checkout',
            '--force', revision
    ],
                       cwd=work_dir):
        err('unable to checkout revision')
        return False

    log('ensuring target revision is up-to-date in work tree')
    origin_revision = 'origin/{}'.format(revision)
    output = []
    if GIT.execute(
        [git_dir, 'rev-parse', '--quiet', '--verify', origin_revision],
            quiet=True,
            capture=output):
        remote_revision = ''.join(output)

        output = []
        GIT.execute([git_dir, 'rev-parse', '--quiet', '--verify', 'HEAD'],
                    quiet=True,
                    capture=output)
        local_revision = ''.join(output)

        debug('remote revision: {}', remote_revision)
        debug('local revision: {}', local_revision)

        if local_revision != remote_revision:
            warn('diverged revision detected; attempting to correct...')
            if not GIT.execute([
                    git_dir,
                    work_tree,
                    'reset',
                    '--hard',
                    origin_revision,
            ],
                               cwd=work_dir):
                err('unable to checkout revision')
                return False

    return True
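
The checkout pairs a bare repository with a separate work tree; a standalone
sketch of that invocation (function name illustrative):

import subprocess

def checkout_into_worktree(cache_dir, work_dir, revision):
    # `-c advice.detachedHead=false` silences the detached-HEAD notice, and
    # `--force` lets the work tree be overwritten to match the revision
    result = subprocess.run(
        ['git', '--git-dir=' + cache_dir, '--work-tree=' + work_dir,
         '-c', 'advice.detachedHead=false',
         'checkout', '--force', revision],
        cwd=work_dir)
    return result.returncode == 0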
Example #15
def fetch(opts):
    """
    support fetching from bzr sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site

    if not BZR.exists():
        err('unable to fetch package; bzr is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    export_opts = [
        'export',
        cache_file,
        site,
        '--format=tgz',
        '--root=' + name,
        '--revision=' + revision,
    ]

    # some environments may have issues exporting bzr sources due to
    # certificate issues; this quirk allows injecting certifi-provided
    # certificates for all bzr exports
    if 'releng.bzr.certifi' in opts._quirks:
        global CERTIFI_MISSING_WARNED

        if certifi:
            verbose('performing bzr fetch with certifi certificates')
            pkg_site = certifi.where()
            export_opts.append('-Ossl.ca_certs=' + pkg_site)
        elif not CERTIFI_MISSING_WARNED:
            CERTIFI_MISSING_WARNED = True
            warn('''\
unable to perform bzr fetch with certifi certificates

A quirk has been enabled to export bzr sources using certifi
certificates; however, certifi is not installed on this system.
''')

    log('exporting sources')
    if not BZR.execute(export_opts, poll=True):
        err('unable to export module')
        return None

    return cache_file
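
The quirk relies on the third-party ``certifi`` package, whose ``where()``
call returns the path of its bundled CA file; that path is what gets handed
to bzr through the ``-Ossl.ca_certs`` option above:

import certifi  # third-party; only available when installed

# the CA bundle path injected via -Ossl.ca_certs in the example
ca_bundle = certifi.where()
print(ca_bundle)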
Example #16
def fetch(opts):
    """
    support fetching from url sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    site = opts.site
    is_mirror_attempt = opts._mirror
    urlopen_context = opts._urlopen_context

    filename = os.path.basename(cache_file)

    note('fetching {}...', name)
    sys.stdout.flush()

    log('requesting: ' + site)
    try:
        with contextlib.closing(urlopen(site, context=urlopen_context)) as rsp:
            total = 0
            if 'content-length' in rsp.headers:
                try:
                    total = int(rsp.headers['content-length'])
                    total_str = display_size(total)
                except ValueError:
                    pass

            read = 0
            with open(cache_file, 'wb') as f:
                while True:
                    buf = rsp.read(REQUEST_READ_BLOCKSIZE)
                    if not buf:
                        break
                    read += len(buf)
                    read_str = display_size(read)

                    if total != read:
                        if total > 0:
                            pct = 100 * float(read) / float(total)
                            print(
                                '[{:02.0f}%] {}: {} of {}            '.format(
                                    pct, filename, read_str, total_str),
                                end='\r')
                        else:
                            print(' {}: {}            '.format(
                                filename, read_str),
                                  end='\r')

                    f.write(buf)
    except Exception as e:
        log_func = warn if is_mirror_attempt else err
        log_func('failed to download resource\n    {}', e)
        return None

    # cleanup any download progress prints
    if read > 0:
        log('')

    log('completed download ({})', display_size(read))
    return cache_file
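
The ``urlopen_context`` passed through the options is an ``ssl.SSLContext``;
a plausible construction (the CA bundle path shown is illustrative):

import ssl

# a default, certificate-verifying context; load_verify_locations can
# point it at a custom CA bundle when needed
context = ssl.create_default_context()
# context.load_verify_locations(cafile='/path/to/ca-bundle.crt')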