Example #1
def fetch(opts):
    """
    support fetching from scp sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    site = opts.site
    work_dir = opts.work_dir

    if not SCP.exists():
        err('unable to fetch package; scp is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    if not SCP.execute(['-o', 'BatchMode yes', site, cache_file],
                       cwd=work_dir):
        err('unable to secure-copy file from target')
        return None
    log('successfully secure-copied file from target')

    return cache_file
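For context, the ``SCP.exists()`` and ``SCP.execute()`` calls above (and the
similar SVN/CVS/HG/GIT helpers in later examples) follow a thin host-tool
wrapper pattern. A minimal sketch of such a wrapper is shown below; this is
an assumption for illustration only, not releng-tool's actual implementation:

# hypothetical host-tool wrapper (assumption; for illustration only)
import shutil
import subprocess

class HostTool:
    def __init__(self, tool):
        self.tool = tool

    def exists(self):
        # check whether the command is available on the host
        return shutil.which(self.tool) is not None

    def execute(self, args, cwd=None):
        # run the command and report success as a boolean
        return subprocess.call([self.tool] + list(args), cwd=cwd) == 0

SCP = HostTool('scp')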
Example #2
def fetch(opts):
    """
    support fetching from svn sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site
    work_dir = opts.work_dir

    if not SVN.exists():
        err('unable to fetch package; svn is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    log('checking out sources')
    if not SVN.execute(['checkout', '-r', revision, site, work_dir],
                       cwd=work_dir):
        err('unable to checkout module')
        return None

    log('caching sources')

    # use a tar filter callback (tarfile's ``exclude`` keyword is deprecated
    # and removed in newer Python releases)
    def svn_filter(info):
        if info.name.endswith('.svn'):
            return None
        return info

    # ensure cache file's directory exists
    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    with tarfile.open(cache_file, 'w:gz') as tar:
        tar.add(work_dir, arcname=name, filter=svn_filter)

    return cache_file
Example #3
    def _stage_exec(self, pkg):
        """
        execute a command for a specific package

        Provides a user the ability to invoke a command in a package's
        extracted directory. This is a helper for users who wish to invoke or
        test commands for a package without needing to navigate to the
        package's build directory and invoke them there.

        Args:
            pkg: the package being processed

        Raises:
            RelengToolExecStageFailure: when the command returns non-zero value
            RelengToolMissingExecCommand: when no command is provided
        """

        exec_cmd = self.opts.target_action_exec
        if not exec_cmd:
            raise RelengToolMissingExecCommand(pkg.name)

        note('execution for {}...', pkg.name)
        debug('dir: {}', pkg.build_tree)
        debug('cmd: {}', exec_cmd)
        sys.stdout.flush()

        proc = subprocess.Popen(
            exec_cmd,
            cwd=pkg.build_tree,
            shell=True,
        )
        proc.communicate()
        sys.stdout.flush()

        if proc.returncode != 0:
            raise RelengToolExecStageFailure
Example #4
def fetch(opts):
    """
    support fetching from cvs sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site
    work_dir = opts.work_dir

    cache_basename = os.path.basename(cache_file)
    cache_stem, __ = interpret_stem_extension(cache_basename)

    if not CVS.exists():
        err('unable to fetch package; cvs is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    try:
        cvsroot, module = site.rsplit(' ', 1)
    except ValueError:
        err('''\
improper cvs site defined

The provided CVS site does not define both the CVSROOT as well as the target
module to checkout. For example:

    :pserver:[email protected]:/var/lib/cvsroot mymodule

 Site: {}''', site)
        return None

    log('checking out sources')
    if not CVS.execute(['-d', cvsroot, 'checkout', '-d', cache_stem,
            '-r', revision, module], cwd=work_dir):
        err('unable to checkout module')
        return None

    cvs_module_dir = os.path.join(work_dir, cache_stem)
    if not os.path.exists(cvs_module_dir):
        err('no sources available for the provided revision')
        return None

    log('caching sources')
    def cvs_filter(info):
        if info.name.endswith('CVS'):
            return None
        return info

    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    with tarfile.open(cache_file, 'w:gz') as tar:
        tar.add(cvs_module_dir, arcname=cache_stem, filter=cvs_filter)

    return cache_file
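A brief illustration of the space-separated CVS site value that the
``rsplit`` call above expects (hypothetical values):

# hypothetical CVS site: a CVSROOT followed by the module to checkout
site = ':pserver:anonymous@cvs.example.org:/var/lib/cvsroot mymodule'
cvsroot, module = site.rsplit(' ', 1)
assert cvsroot == ':pserver:anonymous@cvs.example.org:/var/lib/cvsroot'
assert module == 'mymodule'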
Example #5
def stage(engine, pkg, script_env):
    """
    handles the configuration stage for a package

    With a provided engine and package instance, the configuration stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being configured
        script_env: script environment information

    Returns:
        ``True`` if the configuration stage is completed; ``False`` otherwise
    """

    note('configuring {}...', pkg.name)
    sys.stdout.flush()

    # ignore configuration step for types which do not have one
    if pkg.type == PackageType.PYTHON:
        return True

    if pkg.build_subdir:
        build_dir = pkg.build_subdir
    else:
        build_dir = pkg.build_dir

    pkg_install_type = NC(pkg.install_type, PackageInstallType.TARGET)

    configure_opts = RelengConfigureOptions()
    replicate_package_attribs(configure_opts, pkg)
    configure_opts.build_dir = build_dir
    configure_opts.build_output_dir = pkg.build_output_dir
    configure_opts.conf_defs = pkg.conf_defs
    configure_opts.conf_env = pkg.conf_env
    configure_opts.conf_opts = pkg.conf_opts
    configure_opts.def_dir = pkg.def_dir
    configure_opts.env = script_env
    configure_opts.ext = pkg.ext_modifiers
    configure_opts.host_dir = engine.opts.host_dir
    configure_opts.install_type = pkg_install_type
    configure_opts.name = pkg.name
    configure_opts.prefix = NC(pkg.prefix, engine.opts.sysroot_prefix)
    configure_opts.staging_dir = engine.opts.staging_dir
    configure_opts.symbols_dir = engine.opts.symbols_dir
    configure_opts.target_dir = engine.opts.target_dir
    configure_opts.version = pkg.version
    configure_opts._quirks = engine.opts.quirks

    # if package has a job-override value, use it over any global option
    if pkg.fixed_jobs:
        configure_opts.jobs = pkg.fixed_jobs
        configure_opts.jobsconf = pkg.fixed_jobs
    else:
        configure_opts.jobs = engine.opts.jobs
        configure_opts.jobsconf = engine.opts.jobsconf

    configurer = None
    if pkg.type in engine.registry.package_types:

        def _(opts):
            return engine.registry.package_types[pkg.type].configure(
                pkg.type, opts)

        configurer = _
    elif pkg.type == PackageType.AUTOTOOLS:
        configurer = conf_autotools
    elif pkg.type == PackageType.CMAKE:
        configurer = conf_cmake
    elif pkg.type == PackageType.SCRIPT:
        configurer = conf_script

    if not configurer:
        err('configurer type is not implemented: {}', pkg.type)
        return False

    with interim_working_dir(build_dir):
        configured = configurer(configure_opts)
        if not configured:
            return False

    return True
Example #6
def stage(engine, pkg, script_env):
    """
    handles the installation stage for a package

    With a provided engine and package instance, the installation stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being built
        script_env: script environment information

    Returns:
        ``True`` if the installation stage is completed; ``False`` otherwise
    """

    note('installing {}...', pkg.name)
    sys.stdout.flush()

    if pkg.build_subdir:
        build_dir = pkg.build_subdir
    else:
        build_dir = pkg.build_dir

    pkg_install_type = NC(pkg.install_type, PackageInstallType.TARGET)

    if pkg_install_type == PackageInstallType.HOST:
        dest_dirs = [engine.opts.host_dir]
    elif pkg_install_type == PackageInstallType.IMAGES:
        dest_dirs = [engine.opts.images_dir]
    elif pkg_install_type == PackageInstallType.STAGING:
        dest_dirs = [engine.opts.staging_dir]
    elif pkg_install_type == PackageInstallType.STAGING_AND_TARGET:
        dest_dirs = [engine.opts.staging_dir, engine.opts.target_dir]
    else:
        # default to target directory
        dest_dirs = [engine.opts.target_dir]

    install_opts = RelengInstallOptions()
    replicate_package_attribs(install_opts, pkg)
    install_opts.build_dir = build_dir
    install_opts.build_output_dir = pkg.build_output_dir
    install_opts.cache_file = pkg.cache_file
    install_opts.def_dir = pkg.def_dir
    install_opts.dest_dirs = dest_dirs
    install_opts.env = script_env
    install_opts.ext = pkg.ext_modifiers
    install_opts.host_dir = engine.opts.host_dir
    install_opts.images_dir = engine.opts.images_dir
    install_opts.install_defs = pkg.install_defs
    install_opts.install_env = pkg.install_env
    install_opts.install_opts = pkg.install_opts
    install_opts.install_type = pkg_install_type
    install_opts.name = pkg.name
    install_opts.prefix = NC(pkg.prefix, engine.opts.sysroot_prefix)
    install_opts.staging_dir = engine.opts.staging_dir
    install_opts.symbols_dir = engine.opts.symbols_dir
    install_opts.target_dir = engine.opts.target_dir
    install_opts.version = pkg.version
    install_opts._quirks = engine.opts.quirks

    installer = None
    if pkg.type in engine.registry.package_types:
        def _(opts):
            return engine.registry.package_types[pkg.type].install(
                pkg.type, opts)
        installer = _
    elif pkg.type == PackageType.AUTOTOOLS:
        installer = install_autotools
    elif pkg.type == PackageType.CMAKE:
        installer = install_cmake
    elif pkg.type == PackageType.PYTHON:
        installer = install_python
    elif pkg.type == PackageType.SCRIPT:
        installer = install_script

    if not installer:
        err('installer type is not implemented: {}', pkg.type)
        return False

    with interim_working_dir(build_dir):
        installed = installer(install_opts)
        if not installed:
            return False

    return True
Example #7
def stage(engine, pkg, script_env):
    """
    handles the patching stage for a package

    With a provided engine and package instance, the patching stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being patched
        script_env: script environment information

    Returns:
        ``True`` if the patching stage is completed; ``False`` otherwise
    """

    if pkg.is_internal:
        # packages flagged for local sources do not have a patch stage
        if pkg.local_srcs:
            return True

        # internal packages in development mode that specify a development
        # revision will not perform the patch stage
        if engine.opts.devmode and pkg.has_devmode_option:
            return True

    note('patching {}...', pkg.name)
    sys.stdout.flush()

    if pkg.build_subdir:
        build_dir = pkg.build_subdir
    else:
        build_dir = pkg.build_dir

    patch_script_filename = '{}-{}'.format(pkg.name, PATCH_SCRIPT)
    patch_script = os.path.join(pkg.def_dir, patch_script_filename)
    if os.path.isfile(patch_script):
        try:
            run_path(patch_script, init_globals=script_env)

            verbose('patch script executed: ' + patch_script)
        except Exception as e:
            err('error running patch script: {}\n'
                '    {}', patch_script, e)
            return False

    # find all patches in the package's folder, sort and apply each
    patch_glob = os.path.join(pkg.def_dir, '*.patch')
    patches = glob(patch_glob)
    if patches:
        patches = sorted(patches)
        if not PATCH.exists():
            err('unable to apply patches; patch is not installed')
            return False

        for patch in patches:
            print('({})'.format(os.path.basename(patch)))

            if not PATCH.execute([
                    '--batch',
                    '--forward',
                    '--ignore-whitespace',
                    '--input={}'.format(patch),
                    '--strip=1',
            ],
                                 cwd=build_dir):
                err('failed to apply patch')
                return False

    return True
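Since patches are applied in sorted order, numeric filename prefixes give a
predictable application sequence; a small hypothetical illustration:

# hypothetical patch filenames; sorted() yields lexicographic order
patches = sorted(['0002-add-feature.patch', '0001-fix-build.patch'])
assert patches == ['0001-fix-build.patch', '0002-add-feature.patch']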
Example #8
def fetch(opts):
    """
    support fetching from mercurial sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache directory if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_dir = opts.cache_dir
    name = opts.name
    revision = opts.revision
    site = opts.site

    if not HG.exists():
        err('unable to fetch package; hg (mercurial) is not installed')
        return None

    hg_dir = ['--repository', cache_dir]

    # check if we have the target revision; if so, full stop
    if os.path.isdir(cache_dir) and not opts.ignore_cache:
        if HG.execute(hg_dir + ['--quiet', 'log', '--rev', revision],
                      cwd=cache_dir,
                      quiet=True):
            return cache_dir

    note('fetching {}...', name)
    sys.stdout.flush()

    # if we have no cache for this repository, build one
    if not os.path.isdir(cache_dir):
        if not ensure_dir_exists(cache_dir):
            return None

        if not HG.execute([
                '--noninteractive', '--verbose', 'clone', '--noupdate', site,
                cache_dir
        ],
                          cwd=cache_dir):
            err('unable to clone mercurial repository')
            return None

    log('fetching most recent sources')
    if not HG.execute(hg_dir + ['--noninteractive', '--verbose', 'pull'],
                      cwd=cache_dir):
        err('unable to fetch from remote repository')
        return None

    log('verifying target revision exists')
    if not HG.execute(hg_dir + ['--quiet', 'log', '--rev', revision],
                      cwd=cache_dir,
                      quiet=True):
        err(
            'unable to find matching revision in repository: {}\n'
            ' (revision: {})', name, revision)
        return None

    return cache_dir
Example #9
def fetch(opts):
    """
    support fetching from git sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache directory if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_dir = opts.cache_dir
    name = opts.name
    revision = opts.revision

    if not GIT.exists():
        err('unable to fetch package; git is not installed')
        return None

    git_dir = '--git-dir=' + cache_dir

    # check if we have the target revision cached; if so, package is ready
    if os.path.isdir(cache_dir) and not opts.ignore_cache:
        erv = revision_exists(git_dir, revision)
        if erv in REVISION_EXISTS:
            # ensure configuration is properly synchronized
            if not _sync_git_configuration(opts):
                return None

            # if no explicit ignore-cache request and if the revision is a
            # branch, force ignore-cache on and allow fetching to proceed
            if opts.ignore_cache is None and erv == GitExistsType.EXISTS_BRANCH:
                opts.ignore_cache = True
            # return cache dir if not verifying or verification succeeds
            elif not opts._git_verify_revision or _verify_revision(
                    git_dir, revision, quiet=True):
                return cache_dir

    note('fetching {}...', name)
    sys.stdout.flush()

    # validate any cache directory (if one exists)
    has_cache, bad_validation = _validate_cache(cache_dir)
    if bad_validation:
        return None

    # if we have no cache for this repository, build one
    if not has_cache:
        if not ensure_dir_exists(cache_dir):
            return None

        if not _create_bare_git_repo(cache_dir):
            return None

    # ensure configuration is properly synchronized
    if not _sync_git_configuration(opts):
        return None

    # fetch sources for this repository
    if not _fetch_srcs(opts, cache_dir, revision, refspecs=opts._git_refspecs):
        return None

    # verify revision (if configured to check it)
    if opts._git_verify_revision:
        if not _verify_revision(git_dir, revision):
            err(
                '''\
failed to validate git revision

Package has been configured to require the verification of the GPG signature
for the target revision. The verification has failed. Ensure that the revision
is signed and that the package's public key has been registered in the system.

      Package: {}
     Revision: {}''', name, revision)
            return None

    # fetch submodules (if configured to do so)
    if opts._git_submodules:
        if not _fetch_submodules(opts, cache_dir, revision):
            return None

    return cache_dir
Example #10
def stage(engine, pkg):
    """
    handles the extraction stage for a package

    With a provided engine and package instance, the extraction stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being extracted

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """

    # packages flagged for local sources do not have an extraction stage
    if pkg.local_srcs:
        return True

    # skip packages flagged not to extract
    if pkg.no_extraction:
        return True

    note('extracting {}...', pkg.name)
    sys.stdout.flush()

    extract_opts = RelengExtractOptions()
    replicate_package_attribs(extract_opts, pkg)
    extract_opts.cache_dir = pkg.cache_dir
    extract_opts.cache_file = pkg.cache_file
    extract_opts.ext = pkg.ext_modifiers
    extract_opts.name = pkg.name
    extract_opts.revision = pkg.revision
    extract_opts.strip_count = pkg.strip_count
    extract_opts.version = pkg.version
    extract_opts._extract_override = engine.opts.extract_override
    extract_opts._quirks = engine.opts.quirks

    if os.path.exists(pkg.build_dir):
        warn('build directory exists before extraction; removing')

        if not path_remove(pkg.build_dir):
            err('unable to cleanup build directory: ' + pkg.build_dir)
            return False

    # prepare and step into a newly created working directory
    #
    # An extractor will take the contents of an archive, cache directory or
    # other fetched content and populate the "work" directory. On successful
    # extraction (or moving resources), the work directory will be moved to the
    # package's respective build directory.
    out_dir = engine.opts.out_dir
    with generate_temp_dir(out_dir) as work_dir:
        with interim_working_dir(work_dir):
            extract_opts.work_dir = work_dir

            extracter = None
            hash_exclude = []
            extract_types = engine.registry.extract_types
            if pkg.extract_type and pkg.extract_type in extract_types:
                def _(opts):
                    return engine.registry.extract_types[pkg.extract_type].extract(
                        pkg.extract_type, opts)
                extracter = _
            elif pkg.vcs_type in extract_types:
                extracter = extract_types[pkg.vcs_type].extract
            elif pkg.vcs_type == VcsType.GIT:
                extracter = extract_git
            elif pkg.vcs_type == VcsType.HG:
                extracter = extract_mercurial
            elif os.path.isfile(pkg.cache_file):
                cache_basename = os.path.basename(pkg.cache_file)
                hash_exclude.append(cache_basename)
                extracter = extract_archive

            if not extracter:
                err('extract type is not implemented: {}', pkg.vcs_type)
                return False

            # perform the extract request
            extracted = extracter(extract_opts)
            if not extracted:
                return False

            result = verify_hashes(pkg.hash_file, work_dir, hash_exclude)
            if result == HashResult.VERIFIED:
                pass
            elif result == HashResult.BAD_PATH:
                if not pkg.is_internal:
                    warn('missing hash file for package: ' + pkg.name)
            elif result == HashResult.EMPTY:
                if not pkg.is_internal:
                    verbose('hash file for package is empty: ' + pkg.name)
            elif result in (HashResult.BAD_FORMAT, HashResult.MISMATCH,
                    HashResult.MISSING_LISTED, HashResult.UNSUPPORTED):
                return False
            else:
                err('invalid extract operation (internal error; '
                    'hash-check failure: {})', result)
                return False

        debug('extraction successful; moving sources into package output '
            'directory: ' + pkg.build_dir)
        shutil.move(work_dir, pkg.build_dir)

    return True
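The extraction flow above leans on two directory helpers; a rough sketch of
how such context managers might behave is shown below (assumptions for
illustration, not the tool's actual implementations):

# hypothetical sketches of the directory helpers used in the extract stage
import contextlib
import os
import shutil
import tempfile

@contextlib.contextmanager
def generate_temp_dir(base_dir):
    # create a temporary working directory and clean up whatever remains
    path = tempfile.mkdtemp(dir=base_dir)
    try:
        yield path
    finally:
        shutil.rmtree(path, ignore_errors=True)

@contextlib.contextmanager
def interim_working_dir(path):
    # temporarily switch the current working directory
    original = os.getcwd()
    os.chdir(path)
    try:
        yield path
    finally:
        os.chdir(original)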
Example #11
def fetch(opts):
    """
    support fetching from rsync sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    site = opts.site
    work_dir = opts.work_dir

    cache_basename = os.path.basename(cache_file)
    cache_stem, __ = interpret_stem_extension(cache_basename)

    if not RSYNC.exists():
        err('unable to fetch package; rsync is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    # options
    fetch_opts = {
        '--recursive': '',  # default recursive call
    }
    if opts.extra_opts:
        fetch_opts.update(expand(opts.extra_opts))

    # argument building
    fetch_args = []
    fetch_args.extend(prepare_arguments(fetch_opts))

    # sanity check provided arguments
    for fetch_arg in fetch_args:
        if '--remove-source-files' in fetch_arg:
            err('option `--remove-source-files` not permitted')
            return None
        elif not fetch_arg.startswith('-'):
            err('invalid fetch option provided: {}', fetch_arg)
            return None

    fetch_args.append(site)  # source directory
    fetch_args.append(work_dir)  # destination directory

    if not RSYNC.execute(fetch_args, cwd=work_dir):
        err('unable to rsync from source')
        return None
    log('successfully invoked rsync for source')

    with tarfile.open(cache_file, 'w:gz') as tar:
        tar.add(work_dir, arcname=cache_stem)

    return cache_file
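The option handling above flattens a dictionary into command-line arguments;
a rough sketch of what a helper like ``prepare_arguments`` might do is shown
below (an assumption for illustration, not the tool's actual implementation):

# hypothetical sketch: flatten an options dictionary into argument strings
def prepare_arguments(opts):
    args = []
    for key, value in sorted(opts.items()):
        args.append(key)
        if value:
            args.append(value)
    return args

# e.g. {'--recursive': ''} -> ['--recursive']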
Example #12
def fetch(opts):
    """
    support fetching from bzr sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site

    if not BZR.exists():
        err('unable to fetch package; bzr is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    export_opts = [
        'export',
        cache_file,
        site,
        '--format=tgz',
        '--root=' + name,
        '--revision=' + revision,
    ]

    # some environments may have issues exporting bzr sources due to
    # certificate problems; this quirk allows injecting certifi-provided
    # certificates for all bzr exports
    if 'releng.bzr.certifi' in opts._quirks:
        global CERTIFI_MISSING_WARNED

        if certifi:
            verbose('performing bzr fetch with certifi certificates')
            pkg_site = certifi.where()
            export_opts.append('-Ossl.ca_certs=' + pkg_site)
        elif not CERTIFI_MISSING_WARNED:
            CERTIFI_MISSING_WARNED = True
            warn('''\
unable to perform bzr fetch with certifi certificates

A quirk has been enabled to export bzr images using certifi
certificates; however, certifi is not installed on this system.
''')

    log('exporting sources')
    if not BZR.execute(export_opts, poll=True):
        err('unable to export module')
        return None

    return cache_file
Example #13
def stage(engine, pkg, script_env):
    """
    handles the build stage for a package

    With a provided engine and package instance, the build stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being built
        script_env: script environment information

    Returns:
        ``True`` if the build stage is completed; ``False`` otherwise
    """

    note('building {}...', pkg.name)
    sys.stdout.flush()

    if pkg.build_subdir:
        build_dir = pkg.build_subdir
    else:
        build_dir = pkg.build_dir

    build_opts = RelengBuildOptions()
    replicate_package_attribs(build_opts, pkg)
    build_opts.build_defs = pkg.build_defs
    build_opts.build_dir = build_dir
    build_opts.build_env = pkg.build_env
    build_opts.build_opts = pkg.build_opts
    build_opts.build_output_dir = pkg.build_output_dir
    build_opts.def_dir = pkg.def_dir
    build_opts.env = script_env
    build_opts.ext = pkg.ext_modifiers
    build_opts.host_dir = engine.opts.host_dir
    build_opts.name = pkg.name
    build_opts.prefix = NC(pkg.prefix, engine.opts.sysroot_prefix)
    build_opts.staging_dir = engine.opts.staging_dir
    build_opts.symbols_dir = engine.opts.symbols_dir
    build_opts.target_dir = engine.opts.target_dir
    build_opts.version = pkg.version
    build_opts._quirks = engine.opts.quirks

    # if package has a job-override value, use it over any global option
    if pkg.fixed_jobs:
        build_opts.jobs = pkg.fixed_jobs
        build_opts.jobsconf = pkg.fixed_jobs
    else:
        build_opts.jobs = engine.opts.jobs
        build_opts.jobsconf = engine.opts.jobsconf

    builder = None
    if pkg.type in engine.registry.package_types:
        def _(opts):
            return engine.registry.package_types[pkg.type].build(pkg.type, opts)
        builder = _
    elif pkg.type == PackageType.AUTOTOOLS:
        builder = build_autotools
    elif pkg.type == PackageType.CMAKE:
        builder = build_cmake
    elif pkg.type == PackageType.PYTHON:
        builder = build_python
    elif pkg.type == PackageType.SCRIPT:
        builder = build_script

    if not builder:
        err('build type is not implemented: {}', pkg.type)
        return False

    with interim_working_dir(build_dir):
        built = builder(build_opts)
        if not built:
            return False

    return True
Example #14
def fetch(opts):
    """
    support fetching from url sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the fetched cache file if the fetch stage is completed; ``None``
        otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    site = opts.site
    is_mirror_attempt = opts._mirror
    urlopen_context = opts._urlopen_context

    filename = os.path.basename(cache_file)

    note('fetching {}...', name)
    sys.stdout.flush()

    log('requesting: ' + site)
    try:
        with contextlib.closing(urlopen(site, context=urlopen_context)) as rsp:
            total = 0
            if 'content-length' in rsp.headers:
                try:
                    total = int(rsp.headers['content-length'])
                    total_str = display_size(total)
                except ValueError:
                    pass

            read = 0
            with open(cache_file, 'wb') as f:
                while True:
                    buf = rsp.read(REQUEST_READ_BLOCKSIZE)
                    if not buf:
                        break
                    read += len(buf)
                    read_str = display_size(read)

                    if total != read:
                        if total > 0:
                            pct = 100 * float(read) / float(total)
                            print(
                                '[{:02.0f}%] {}: {} of {}            '.format(
                                    pct, filename, read_str, total_str),
                                end='\r')
                        else:
                            print(' {}: {}            '.format(
                                filename, read_str),
                                  end='\r')

                    f.write(buf)
    except Exception as e:
        log_func = warn if is_mirror_attempt else err
        log_func('failed to download resource\n' '    {}', e)
        return None

    # cleanup any download progress prints
    if read > 0:
        log('')

    log('completed download ({})', display_size(read))
    return cache_file
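The progress output above relies on a human-readable size formatter; a
minimal sketch of what a helper like ``display_size`` could look like
(an assumption, not the tool's implementation):

# hypothetical sketch of a human-readable byte-size formatter
def display_size(size):
    for unit in ('B', 'KiB', 'MiB', 'GiB', 'TiB'):
        if size < 1024 or unit == 'TiB':
            return '{:.1f} {}'.format(size, unit)
        size /= 1024.0

# e.g. display_size(2048) == '2.0 KiB'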