Example #1
def interim_working_dir(dir_):
    """
    move into a context-supported working directory

    Moves the current context into the provided working directory ``dir_``. When
    returned, the original working directory will be restored. If the provided
    directory does not exist, it will be created. If the directory could not be
    created, a ``FailedToPrepareWorkingDirectoryError`` exception will be
    raised.

    An example when using in the context of script helpers is as follows:

    .. code-block:: python

        with releng_wd('my-directory/'):
            # invoked in 'my-directory'

        # invoked in original working directory

    Args:
        dir_: the target working directory

    Raises:
        FailedToPrepareWorkingDirectoryError: the working directory does not
            exist and could not be created
    """
    owd = os.getcwd()

    if not ensure_dir_exists(dir_):
        raise FailedToPrepareWorkingDirectoryError(dir_)

    os.chdir(dir_)
    try:
        yield dir_
    finally:
        try:
            os.chdir(owd)
        except IOError:
            warn('unable to restore original working directory: ' + owd)
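
A minimal usage sketch follows. It assumes the generator above is exposed as a
context manager (e.g. decorated with ``contextlib.contextmanager``), which is
how the later fetch/extract examples consume it; the directory name is
illustrative only.

import os

# step into a (created-if-needed) working directory for a block of work
with interim_working_dir('my-output/'):
    # relative paths now resolve inside 'my-output/'
    with open('artifact.txt', 'w') as f:
        f.write('example')

# the original working directory has been restored here
print(os.getcwd())
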
Example #2
def generate_temp_dir(dir_=None):
    """
    generate a context-supported temporary directory

    Creates a temporary directory in the provided directory ``dir_`` (or the
    system default, if not provided). This is a context-supported call and will
    automatically remove the directory when completed. If the provided
    directory does not exist, it will be created. If the directory could not be
    created, a ``FailedToPrepareBaseDirectoryError`` exception will be raised.

    An example when using in the context of script helpers is as follows:

    .. code-block:: python

        with releng_tmpdir() as dir_:
            print(dir_)

    Args:
        dir_ (optional): the directory to create the temporary directory in

    Raises:
        FailedToPrepareBaseDirectoryError: the base directory does not exist and
            could not be created
    """
    if dir_ and not ensure_dir_exists(dir_):
        raise FailedToPrepareBaseDirectoryError(dir_)

    dir_ = tempfile.mkdtemp(prefix='.releng-tmp-', dir=dir_)
    try:
        yield dir_
    finally:
        try:
            path_remove(dir_)
        except OSError as e:
            if e.errno != errno.ENOENT:
                warn('unable to cleanup temporary directory: {}\n'
                     '    {}', dir_, e)
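
A similar sketch for the temporary-directory helper, under the same assumption
that the generator is wrapped as a context manager; the base directory name is
illustrative.

import os

# create a throw-away '.releng-tmp-*' directory under 'out/'
with generate_temp_dir('out/') as tmp_dir:
    staging = os.path.join(tmp_dir, 'staging.bin')
    with open(staging, 'wb') as f:
        f.write(b'\x00')

# the temporary directory and its contents are removed on exit
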
Example #3
    def _stage_license(self, pkg):
        """
        process license files for a specific package

        If a package contains one or more files containing license information,
        this information will be populated into the package's license folder.

        Args:
            pkg: the package being processed

        Returns:
            ``True`` if the license information was copied; ``False`` if the
            license information could not be copied
        """

        # skip if package has no license files
        if not pkg.license_files:
            if pkg.license and not pkg.is_internal and not pkg.no_extraction:
                warn('package defines no license files: ' + pkg.name)
            return True

        # ensure package-specific license directory exists
        pkg_license_dir = os.path.join(self.opts.license_dir, pkg.nv)
        if not ensure_dir_exists(pkg_license_dir):
            return False

        # copy over each license file
        for file in pkg.license_files:
            src = os.path.join(pkg.build_dir, file)
            dst = os.path.join(pkg_license_dir, file)

            if not path_copy(src, dst, critical=False):
                err('unable to copy license information: ' + pkg.name)
                return False

        return True
Example #4
def extract(opts):
    """
    support extraction of an archive into a build directory

    With provided extraction options (``RelengExtractOptions``), the extraction
    stage will be processed. The archive's extension will be used in an attempt
    to find a matching tool/implementation which can be used to extract the
    contents of the file. In the event that the method of extraction cannot be
    determined, it will be assumed that the file is not extractable. Files which
    are not extracted are just copied into the build directory (e.g. single
    resource files).

    Args:
        opts: the extraction options

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """

    assert opts
    cache_file = opts.cache_file
    strip_count = opts.strip_count
    work_dir = opts.work_dir

    cache_basename = os.path.basename(cache_file)
    __, cache_ext = interpret_stem_extension(cache_basename)

    is_extractable = False
    if cache_ext:
        cache_ext = cache_ext.lower()

        # if the user defines a tool override for this extension type, use
        # whatever the user wants to use (passing the file and directory to
        # extract to)
        extract_override = getattr(opts, '_extract_override', None)
        if extract_override and cache_ext in extract_override:
            is_extractable = True

            tool_cmd = extract_override[cache_ext].format(file=cache_file,
                                                          dir=work_dir)

            if not execute(tool_cmd.split(), cwd=work_dir, critical=False):
                err('unable to extract with tool override\n'
                    ' (command: {})', tool_cmd)
                return False

        # attempt to extract the (compressed) tar archive with the host's
        # tar tool; if it does not exist, we'll fallback to using python's
        # internal implementation (tarfile)
        elif cache_ext.startswith(TAR_SUPPORTED):
            is_extractable = True

            # before attempting to use an external tar command, only allow
            # using it if the `force-local` option is available whenever a
            # colon character is provided, to prevent tar from assuming the
            # path is a remote target
            needs_force_local = False
            if ':' in cache_file:
                needs_force_local = True

            has_extracted = False
            if TAR.exists() and (TAR.force_local or not needs_force_local):
                tar_args = [
                    '--extract',
                    '--file=' + cache_file,
                    '--strip-components={}'.format(strip_count),
                    '--verbose',
                ]

                if needs_force_local:
                    tar_args.append('--force-local')

                if TAR.execute(tar_args, cwd=work_dir):
                    has_extracted = True
                else:
                    warn('unable to extract archive with host tar; '
                         'will use fallback')

            if not has_extracted:
                try:

                    def tar_extract(members, strip_count):
                        for member in members:
                            # strip members based on the package-defined count
                            if strip_count > 0:
                                np = os.path.normpath(member.name)
                                parts = np.split(os.path.sep, strip_count)
                                if len(parts) <= strip_count:
                                    continue
                                member.name = parts[-1]

                            # notify the user of the target member to extract
                            print(member.name)
                            yield member

                    with tarfile.open(cache_file, 'r') as tar:
                        tar.extractall(path=work_dir,
                                       members=tar_extract(tar, strip_count))
                except Exception as e:
                    err(
                        'unable to extract tar file\n'
                        '    {}\n'
                        ' (file: {})\n'
                        ' (target: {})', e, cache_file, work_dir)
                    return False

        # extract a zip-extension cache file using python's internal
        # implementation (zipfile)
        elif cache_ext == 'zip':
            is_extractable = True

            try:
                with ZipFile(cache_file, 'r') as zip_:
                    for member in zip_.namelist():
                        # strip members based on the package-defined count
                        member_s = member
                        if strip_count > 0:
                            np = os.path.normpath(member_s)
                            parts = np.split(os.path.sep, strip_count)
                            if len(parts) <= strip_count:
                                continue
                            member_s = parts[-1]
                        dest = os.path.join(work_dir, member_s)

                        # notify the user of the target member to extract
                        print(member)

                        # if this is a directory entry, ensure the directory
                        # exists for the destination
                        if not os.path.basename(member):
                            ensure_dir_exists(dest)
                        else:
                            # always ensure the container directory for a file
                            # exists before attempting to extract a member into
                            # it, as not all processed zip files may process
                            # a directory entry (to be created) ahead of time
                            ensure_dir_exists(os.path.dirname(dest))

                            with zip_.open(member) as s, open(dest, 'wb') as f:
                                shutil.copyfileobj(s, f)

            except Exception as e:
                err(
                    'unable to extract zip file\n'
                    '    {}\n'
                    ' (file: {})\n'
                    ' (target: {})', e, cache_file, work_dir)
                return False

    if not is_extractable:
        debug('file not considered extractable: ' + cache_file)
        try:
            shutil.copy2(cache_file, work_dir)
        except IOError as e:
            err(
                'unable to copy over cache file\n'
                '    {}\n'
                ' (file: {})\n'
                ' (target: {})', e, cache_file, work_dir)
            return False

    return True
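
The member-name stripping used for both the tar and zip paths above relies on
``str.split(sep, maxsplit)``; a small standalone sketch of the same technique
(the function name and sample paths are illustrative):

import os

def strip_member_name(member_name, strip_count):
    # drop the first `strip_count` path components; members that sit entirely
    # within the stripped prefix are reported as None (i.e. skipped)
    if strip_count <= 0:
        return member_name
    parts = os.path.normpath(member_name).split(os.path.sep, strip_count)
    if len(parts) <= strip_count:
        return None
    return parts[-1]

# with a POSIX separator:
#   strip_member_name('pkg-1.0/src/main.c', 1)  ->  'src/main.c'
#   strip_member_name('pkg-1.0', 1)             ->  None
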
Example #5
def initialize_sample(opts):
    """
    initialize a sample project

    Generates a sample project in the provided root directory to help new users
    or new projects get started.

    Args:
        opts: options for this run

    Returns:
        ``True`` if the sample project could be initialized; ``False`` if an
        issue has occurred generating the sample project
    """

    root_dir = opts.root_dir

    if not ensure_dir_exists(root_dir):
        return False

    if os.listdir(root_dir):
        err('unable to initialize sample project in a non-empty directory')
        return False

    sample_dir = os.path.join(root_dir, 'package', 'sample')

    success = True
    if ensure_dir_exists(sample_dir):
        # sample project
        sample_defs = os.path.join(root_dir, 'package', 'sample', 'sample')
        try:
            with open(sample_defs, 'w') as f:
                f.write('''\
#!/usr/bin/env python
# -*- coding: utf-8 -*-

SAMPLE_DEPENDENCIES = []
SAMPLE_LICENSE = ['<license name>']
SAMPLE_LICENSE_FILES = ['<license file>']
SAMPLE_SITE = '<location for sources>'
SAMPLE_TYPE = '<package-type>'
SAMPLE_VERSION = '<package-version>'
''')

            verbose('written sample file')
        except IOError as e:
            err('unable to generate a sample file')
            verbose(str(e))
            success = False
    else:
        success = False

    # .gitignore
    try:
        project_gitignore = os.path.join(root_dir,
                                         '.gitignore')  # (assumption)
        with open(project_gitignore, 'w') as f:
            f.write('''\
# releng-tool
/cache/
/dl/
/output/
.releng-flag-*
''')

        verbose('written .gitignore file')
    except IOError as e:
        err('unable to generate a .gitignore file')
        verbose(str(e))
        success = False

    # releng project
    try:
        project_defs = os.path.join(root_dir, 'releng')
        with open(project_defs, 'w') as f:
            f.write('''\
#!/usr/bin/env python
# -*- coding: utf-8 -*-

packages = [
    'sample',
]
''')

        verbose('written releng file')
    except IOError as e:
        err('unable to generate a releng file')
        verbose(str(e))
        success = False

    if success:
        log('initialized empty releng-tool project')
    else:
        warn('partially initialized a releng-tool project')
    return success
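
For reference, a successful initialization produces the following layout under
the provided root directory (derived from the paths written above):

    <root_dir>/
    ├── .gitignore
    ├── releng
    └── package/
        └── sample/
            └── sample
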
Example #6
def stage(engine, pkg, ignore_cache, extra_opts):
    """
    handles the fetching stage for a package

    With a provided engine and package instance, the fetching stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being fetched
        ignore_cache: always attempt to ignore the cache
        extra_opts: extra options for the fetch operation (if applicable)

    Returns:
        ``True`` if the fetching stage is completed; ``False`` otherwise
    """
    assert pkg.vcs_type
    name = pkg.name
    debug('process fetch stage: ' + name)

    # packages flagged for local sources are required to already be checked out
    if pkg.local_srcs:
        if os.path.isdir(pkg.build_dir):
            return True

        err(
            '''\
missing local sources for internal package: {0}

The active configuration is flagged for 'local sources' mode; however, an
internal package cannot be found in the local system. Before continuing, ensure
you have checked out all internal packages on your local system (or, disable the
local sources option to use the default process).

       Package: {0}
 Expected Path: {1}''', name, pkg.build_dir)
        return False

    # if the vcs-type is archive-based, flag that hash checks are needed
    perform_file_asc_check = False
    perform_file_hash_check = False
    if pkg.vcs_type == VcsType.URL:
        perform_file_asc_check = os.path.exists(pkg.asc_file)
        perform_file_hash_check = True

    fetch_opts = RelengFetchOptions()
    replicate_package_attribs(fetch_opts, pkg)
    fetch_opts.cache_dir = pkg.cache_dir
    fetch_opts.ext = pkg.ext_modifiers
    fetch_opts.extra_opts = extra_opts
    fetch_opts.ignore_cache = ignore_cache
    fetch_opts.name = name
    fetch_opts.revision = pkg.revision
    fetch_opts.site = pkg.site
    fetch_opts.version = pkg.version
    fetch_opts._mirror = False
    fetch_opts._quirks = engine.opts.quirks
    fetch_opts._urlopen_context = engine.opts.urlopen_context

    cache_filename = os.path.basename(pkg.cache_file)
    out_dir = engine.opts.out_dir
    with generate_temp_dir(out_dir) as work_dir, \
            generate_temp_dir(out_dir) as interim_cache_dir:
        with interim_working_dir(work_dir):
            interim_cache_file = os.path.join(interim_cache_dir,
                                              cache_filename)
            fetch_opts.cache_file = interim_cache_file
            fetch_opts.work_dir = work_dir

            # check if file caching should be ignored
            #
            # In special cases, a developer may configure a project so that a
            # fetched source is not cached. For example, pulling from a branch
            # of a VCS source will create a cache file for the branch, which
            # will remain until it is manually removed. A user may wish to
            # re-build the local cache file after cleaning their project. While
            # the releng-tool framework separates fetching/extraction into two
            # parts, ignoring cached assets can be partially achieved by simply
            # removing any detected cache file if a project is configured to
            # ignore a cache file.
            if engine.opts.devmode and pkg.devmode_ignore_cache is not None:
                fetch_opts.ignore_cache = pkg.devmode_ignore_cache

                if pkg.devmode_ignore_cache and os.path.exists(pkg.cache_file):
                    verbose('removing cache file (per configuration): ' + name)
                    if not path_remove(pkg.cache_file):
                        return False

            # remove cache file if there is a force request to ignore the cache
            elif engine.opts.force and ignore_cache:
                if os.path.exists(pkg.cache_file):
                    verbose('removing cache file (forced): ' + name)
                    if not path_remove(pkg.cache_file):
                        return False

            # force explicit ignore cache (to off) when not in development mode
            elif not engine.opts.devmode and ignore_cache is None:
                fetch_opts.ignore_cache = False

            if os.path.exists(pkg.cache_file):
                rv = None
                if perform_file_hash_check:
                    hr = verify_hashes(pkg.hash_file,
                                       pkg.cache_file,
                                       relaxed=True)

                    if hr == HashResult.VERIFIED:
                        rv = True
                    elif hr == HashResult.BAD_PATH:
                        if not perform_file_asc_check and not pkg.is_internal:
                            warn('missing hash file for package: ' + name)
                        rv = True  # no hash file to compare with; assuming ok
                    elif hr == HashResult.EMPTY:
                        if not pkg.is_internal:
                            warn('hash file for package is empty: ' + name)
                        rv = True  # empty hash file; assuming ok
                    elif hr == HashResult.MISMATCH:
                        if not path_remove(pkg.cache_file):
                            rv = False
                    elif hr in (HashResult.BAD_FORMAT, HashResult.UNSUPPORTED):
                        rv = False
                    elif hr == HashResult.MISSING_ARCHIVE:
                        if not perform_file_asc_check:
                            err(
                                '''\
missing archive hash for verification

The hash file for this package does not have an entry for the cache file to be
verified. Ensure the hash file defines an entry for the expected cache file:

    Hash File: {}
         File: {}''', pkg.hash_file, cache_filename)
                            rv = False
                    else:
                        err(
                            'invalid fetch operation (internal error; '
                            'hash-check failure: {})', hr)
                        rv = False
                else:
                    rv = True

                if rv is not False and perform_file_asc_check and \
                        os.path.exists(pkg.cache_file):
                    if GPG.validate(pkg.asc_file, pkg.cache_file):
                        rv = True
                    else:
                        if not path_remove(pkg.cache_file):
                            err(
                                '''\
failed to validate against ascii-armor

Validation of a package resource failed to verify against a provided ASCII-armor
file. Ensure that the package's public key has been registered into gpg.

 ASC File: {}
     File: {}''', pkg.asc_file, cache_filename)
                            rv = False
                        else:
                            rv = None

                if rv is not None:
                    if ignore_cache:
                        verbose('ignoring cache not supported for package: {}',
                                name)
                    return rv

            # find fetching method for the target vcs-type
            fetcher = None
            if pkg.vcs_type in engine.registry.fetch_types:

                def _(opts):
                    return engine.registry.fetch_types[pkg.vcs_type].fetch(
                        pkg.vcs_type, opts)

                fetcher = _
            elif pkg.vcs_type == VcsType.BZR:
                fetcher = fetch_bzr
            elif pkg.vcs_type == VcsType.CVS:
                fetcher = fetch_cvs
            elif pkg.vcs_type == VcsType.GIT:
                fetcher = fetch_git
            elif pkg.vcs_type == VcsType.HG:
                fetcher = fetch_mercurial
            elif pkg.vcs_type == VcsType.RSYNC:
                fetcher = fetch_rsync
            elif pkg.vcs_type == VcsType.SCP:
                fetcher = fetch_scp
            elif pkg.vcs_type == VcsType.SVN:
                fetcher = fetch_svn
            elif pkg.vcs_type == VcsType.URL:
                fetcher = fetch_url

            if not fetcher:
                err('fetch type is not implemented: {}', pkg.vcs_type)
                return False

            # if this is url-type location, attempt to search on the mirror
            # first (if configured)
            fetched = None
            if engine.opts.url_mirror and pkg.vcs_type == VcsType.URL:
                original_site = fetch_opts.site
                new_site = engine.opts.url_mirror + cache_filename
                if original_site != new_site:
                    fetch_opts._mirror = True

                    fetch_opts.site = new_site
                    fetched = fetcher(fetch_opts)
                    fetch_opts.site = original_site

                    fetch_opts._mirror = False

            # perform the fetch request (if not already fetched)
            if not fetched:
                fetched = fetcher(fetch_opts)
                if not fetched:
                    return False

            # if the fetch type has populated the package's cache directory
            # directly, we are done
            if fetched == pkg.cache_dir:
                pass
            # if the fetch type has returned a file, the file needs to be hash
            # checked and then be moved into the download cache
            elif fetched == interim_cache_file:
                if perform_file_hash_check:
                    hr = verify_hashes(pkg.hash_file, fetched)
                    if hr == HashResult.VERIFIED:
                        pass
                    elif hr == HashResult.BAD_PATH:
                        if not perform_file_asc_check and not pkg.is_internal:
                            warn('missing hash file for package: ' + name)
                    elif hr == HashResult.EMPTY:
                        if not pkg.is_internal:
                            warn('hash file for package is empty: ' + name)
                    elif hr == HashResult.MISMATCH:
                        return False
                    elif hr in (HashResult.BAD_FORMAT, HashResult.UNSUPPORTED):
                        return False
                    elif hr == HashResult.MISSING_ARCHIVE:
                        if not perform_file_asc_check:
                            err(
                                '''\
missing archive hash for verification

The hash file for this package does not have an entry for the cache file to be
verified. Ensure the hash file defines an entry for the expected cache file:

    Hash File: {}
         File: {}''', pkg.hash_file, cache_filename)
                            return False
                    else:
                        err(
                            'invalid fetch operation (internal error; '
                            'hash-check failure: {})', hr)
                        return False

                if perform_file_asc_check:
                    if not GPG.validate(pkg.asc_file, interim_cache_file):
                        err(
                            '''\
failed to validate against ascii-armor

Validation of a package resource failed to verify against a provided ASCII-armor
file. Ensure that the package's public key has been registered into gpg.

     ASC File: {}
         File: {}''', pkg.asc_file, cache_filename)
                        return False

                debug('fetch successful; moving cache file')

                # ensure the cache container/directory exists
                cache_dir = os.path.dirname(pkg.cache_file)
                if not ensure_dir_exists(cache_dir):
                    return False

                try:
                    shutil.move(interim_cache_file, pkg.cache_file)
                except shutil.Error:
                    err(
                        'invalid fetch operation (internal error; fetch mode '
                        '"{}" has provided a missing cache file)',
                        pkg.vcs_type)
                    return False
            else:
                err(
                    'invalid fetch operation (internal error; fetch mode "{}" '
                    'has returned an unsupported value)', pkg.vcs_type)
                return False

    return True
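
The stage above accepts two kinds of results from a fetcher: the package's
cache directory (when the fetcher populated the cache in place) or the interim
cache file (when an archive was downloaded and still needs hash/signature
checks before being moved into the download cache). A hedged sketch of a
fetcher honouring that contract -- the function name and source path are
illustrative and not part of any releng-tool API:

import os
import shutil

def fetch_from_local_share(opts):
    # illustrative: copy a pre-downloaded archive into the interim cache file
    # so the stage above can verify and relocate it
    src = os.path.join('/srv/share', os.path.basename(opts.cache_file))
    if not os.path.isfile(src):
        return False              # nothing fetched; the stage reports a failure
    shutil.copy2(src, opts.cache_file)
    return opts.cache_file        # matches the `interim_cache_file` branch
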
Example #7
def _fetch_srcs(opts, cache_dir, revision, desc=None, refspecs=None):
    """
    invokes a git fetch call of the configured origin into a bare repository

    With a provided cache directory (``cache_dir``; bare repository), fetch the
    contents of a configured origin into the directory. The fetch call will
    use a restricted depth, unless configured otherwise. In the event a target
    revision cannot be found (if provided), an unshallow fetch will be made.

    This call may be invoked without a revision provided -- specifically, this
    can occur for submodule configurations which do not have a specific revision
    explicitly set.

    Args:
        opts: fetch options
        cache_dir: the bare repository to fetch into
        revision: expected revision desired from the repository
        desc (optional): description to use for error message
        refspecs (optional): additional refspecs to add to the fetch call

    Returns:
        ``True`` if the fetch was successful; ``False`` otherwise
    """

    git_dir = '--git-dir=' + cache_dir

    if not desc:
        desc = 'repository: {}'.format(opts.name)

    log('fetching most recent sources')
    prepared_fetch_cmd = [
        git_dir,
        'fetch',
        '--progress',
        '--prune',
        'origin',
    ]

    # limit fetch depth
    target_depth = 1
    if opts._git_depth is not None:
        target_depth = opts._git_depth
    limited_fetch = (target_depth
                     and 'releng.git.no_depth' not in opts._quirks)

    depth_cmds = [
        '--depth',
        str(target_depth),
    ]

    # if a revision is provided, first attempt to do a revision-specific fetch
    quick_fetch = 'releng.git.no_quick_fetch' not in opts._quirks
    if revision and quick_fetch:
        ls_cmd = [
            'ls-remote',
            '--exit-code',
            'origin',
        ]
        debug('checking if tag exists on remote')
        if GIT.execute(ls_cmd + ['--tags', 'refs/tags/{}'.format(revision)],
                       cwd=cache_dir,
                       quiet=True):
            debug('attempting a tag reference fetch operation')
            fetch_cmd = list(prepared_fetch_cmd)
            fetch_cmd.append('+refs/tags/{0}:refs/tags/{0}'.format(revision))
            if limited_fetch:
                fetch_cmd.extend(depth_cmds)

            if GIT.execute(fetch_cmd, cwd=cache_dir):
                debug('found the reference')
                return True

        debug('checking if reference exists on remote')
        if GIT.execute(ls_cmd + ['--heads', 'refs/heads/{}'.format(revision)],
                       cwd=cache_dir,
                       quiet=True):
            debug('attempting a head reference fetch operation')
            fetch_cmd = list(prepared_fetch_cmd)
            fetch_cmd.append(
                '+refs/heads/{0}:refs/remotes/origin/{0}'.format(revision))
            if limited_fetch:
                fetch_cmd.extend(depth_cmds)

            if GIT.execute(fetch_cmd, cwd=cache_dir):
                debug('found the reference')
                return True

    # fetch standard (and configured) refspecs
    std_refspecs = [
        '+refs/heads/*:refs/remotes/origin/*',
        '+refs/tags/*:refs/tags/*',
    ]
    prepared_fetch_cmd.extend(std_refspecs)

    # allow fetching additional references if configured (e.g. pull requests)
    if refspecs:
        for ref in refspecs:
            prepared_fetch_cmd.append(
                '+refs/{0}:refs/remotes/origin/{0}'.format(ref))

    fetch_cmd = list(prepared_fetch_cmd)
    if limited_fetch:
        fetch_cmd.extend(depth_cmds)

    if not GIT.execute(fetch_cmd, cwd=cache_dir):
        err('unable to fetch branches/tags from remote repository')
        return False

    if revision:
        verbose('verifying target revision exists')
        exists_state = revision_exists(git_dir, revision)
        if exists_state in REVISION_EXISTS:
            pass
        elif (exists_state == GitExistsType.MISSING_HASH and limited_fetch
              and opts._git_depth is None):
            warn('failed to find hash on depth-limited fetch; fetching all...')

            fetch_cmd = list(prepared_fetch_cmd)
            fetch_cmd.append('--unshallow')

            if not GIT.execute(fetch_cmd, cwd=cache_dir):
                err('unable to unshallow fetch state')
                return False

            if revision_exists(git_dir, revision) not in REVISION_EXISTS:
                err(
                    'unable to find matching revision in {}\n'
                    ' (revision: {})', desc, revision)
                return False
        else:
            err('unable to find matching revision in {}\n'
                ' (revision: {})', desc, revision)
            return False

    return True
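
For reference, the quick-fetch path above roughly corresponds to the following
git invocations when run against the bare cache repository ('v1.0' and 'main'
stand in for the requested revision; the ``--depth 1`` argument is only added
for depth-limited fetches):

#   git ls-remote --exit-code origin --tags refs/tags/v1.0
#   git --git-dir=<cache_dir> fetch --progress --prune origin \
#       '+refs/tags/v1.0:refs/tags/v1.0' --depth 1
#
#   git ls-remote --exit-code origin --heads refs/heads/main
#   git --git-dir=<cache_dir> fetch --progress --prune origin \
#       '+refs/heads/main:refs/remotes/origin/main' --depth 1
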
Example #8
def stage(engine, pkg):
    """
    handles the extraction stage for a package

    With a provided engine and package instance, the extraction stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being extracted

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """

    # packages flagged for local sources do not have an extraction stage
    if pkg.local_srcs:
        return True

    # skip packages flagged not to extract
    if pkg.no_extraction:
        return True

    note('extracting {}...', pkg.name)
    sys.stdout.flush()

    extract_opts = RelengExtractOptions()
    replicate_package_attribs(extract_opts, pkg)
    extract_opts.cache_dir = pkg.cache_dir
    extract_opts.cache_file = pkg.cache_file
    extract_opts.ext = pkg.ext_modifiers
    extract_opts.name = pkg.name
    extract_opts.revision = pkg.revision
    extract_opts.strip_count = pkg.strip_count
    extract_opts.version = pkg.version
    extract_opts._extract_override = engine.opts.extract_override
    extract_opts._quirks = engine.opts.quirks

    if os.path.exists(pkg.build_dir):
        warn('build directory exists before extraction; removing')

        if not path_remove(pkg.build_dir):
            err('unable to cleanup build directory: ' + pkg.build_dir)
            return False

    # prepare and step into a newly created working directory
    #
    # An extractor will take the contents of an archive, cache directory or
    # other fetched content and populate the "work" directory. On successful
    # extraction (or moving resources), the work directory will be moved to the
    # package's respective build directory.
    out_dir = engine.opts.out_dir
    with generate_temp_dir(out_dir) as work_dir:
        with interim_working_dir(work_dir):
            extract_opts.work_dir = work_dir

            extracter = None
            hash_exclude = []
            extract_types = engine.registry.extract_types
            if pkg.extract_type and pkg.extract_type in extract_types:
                def _(opts):
                    return extract_types[pkg.extract_type].extract(
                        pkg.extract_type, opts)
                extracter = _
            elif pkg.vcs_type in extract_types:
                extracter = extract_types[pkg.vcs_type].extract
            elif pkg.vcs_type == VcsType.GIT:
                extracter = extract_git
            elif pkg.vcs_type == VcsType.HG:
                extracter = extract_mercurial
            elif os.path.isfile(pkg.cache_file):
                cache_basename = os.path.basename(pkg.cache_file)
                hash_exclude.append(cache_basename)
                extracter = extract_archive

            if not extracter:
                err('extract type is not implemented: {}', pkg.vcs_type)
                return False

            # perform the extract request
            extracted = extracter(extract_opts)
            if not extracted:
                return False

            result = verify_hashes(pkg.hash_file, work_dir, hash_exclude)
            if result == HashResult.VERIFIED:
                pass
            elif result == HashResult.BAD_PATH:
                if not pkg.is_internal:
                    warn('missing hash file for package: ' + pkg.name)
            elif result == HashResult.EMPTY:
                if not pkg.is_internal:
                    verbose('hash file for package is empty: ' + pkg.name)
            elif result in (HashResult.BAD_FORMAT, HashResult.MISMATCH,
                    HashResult.MISSING_LISTED, HashResult.UNSUPPORTED):
                return False
            else:
                err('invalid extract operation (internal error; '
                    'hash-check failure: {})', result)
                return False

        debug('extraction successful; moving sources into package output '
            'directory: ' + pkg.build_dir)
        shutil.move(work_dir, pkg.build_dir)

    return True
Example #9
def _workdir_extract(cache_dir, work_dir, revision):
    """
    extract a provided revision from a cache (bare) repository to a work tree

    Using a provided bare repository (``cache_dir``) and a working tree
    (``work_dir``), extract the contents of the repository using the provided
    ``revision`` value. This call will force the working directory to match the
    target revision. In the case where the work tree has diverged, the contents
    will be replaced with the origin's revision.

    Args:
        cache_dir: the cache repository
        work_dir: the working directory
        revision: the revision

    Returns:
        ``True`` if the extraction has succeeded; ``False`` otherwise
    """

    git_dir = '--git-dir=' + cache_dir
    work_tree = '--work-tree=' + work_dir

    # if a revision is not provided, extract the HEAD from the cache
    if not revision:
        revision = GIT.extract_submodule_revision(cache_dir)
        if not revision:
            return False

    log('checking out target revision into work tree')
    if not GIT.execute([
            git_dir, work_tree, '-c', 'advice.detachedHead=false', 'checkout',
            '--force', revision
    ],
                       cwd=work_dir):
        err('unable to checkout revision')
        return False

    log('ensure target revision is up-to-date in work tree')
    origin_revision = 'origin/{}'.format(revision)
    output = []
    if GIT.execute(
        [git_dir, 'rev-parse', '--quiet', '--verify', origin_revision],
            quiet=True,
            capture=output):
        remote_revision = ''.join(output)

        output = []
        GIT.execute([git_dir, 'rev-parse', '--quiet', '--verify', 'HEAD'],
                    quiet=True,
                    capture=output)
        local_revision = ''.join(output)

        debug('remote revision: {}', remote_revision)
        debug('local revision: {}', local_revision)

        if local_revision != remote_revision:
            warn('diverged revision detected; attempting to correct...')
            if not GIT.execute([
                    git_dir,
                    work_tree,
                    'reset',
                    '--hard',
                    origin_revision,
            ],
                               cwd=work_dir):
                err('unable to checkout revision')
                return False

    return True
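
For reference, the checkout/synchronization sequence above is roughly
equivalent to the following git invocations ('<rev>' stands in for the target
revision; the final ``reset`` only runs when the local and remote revisions
diverge):

#   git --git-dir=<cache_dir> --work-tree=<work_dir> \
#       -c advice.detachedHead=false checkout --force <rev>
#   git --git-dir=<cache_dir> rev-parse --quiet --verify origin/<rev>
#   git --git-dir=<cache_dir> rev-parse --quiet --verify HEAD
#   git --git-dir=<cache_dir> --work-tree=<work_dir> reset --hard origin/<rev>
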
Example #10
def verify(hash_file, path, exclude=None, relaxed=False, quiet=False):
    """
    verify a file or directory with the hashes defined in the provided hash file

    Performs a hash verification check for one or more files at the provided
    path. The provided ``hash_file`` will contain a series of expected hash
    entries for one or more files. If the provided ``path`` is a file, the
    file's hash will be compared to a matching entry in the hash file. If the
    provided ``path`` is a directory, each entry defined in the ``hash_file``
    will be checked and verified. The ``hash_file`` is a UTF-8 encoded file
    containing 3-tuple entries defining the type of hash algorithm used, the
    hash value expected and the asset associated with the hash (see
    ``load_hashes``). On the successful verification of all files, this method
    will return ``HashResult.VERIFIED``. Other warning/error states for
    verification will be returned with a respective ``HashResult`` value.

    Args:
        hash_file: the file containing hash information
        path: the file or directory to verify
        exclude: assets to exclude from the verification check
        relaxed: relax logging to only warn on detected missing/mismatched files
        quiet: disablement of error messages to standard out

    Returns:
        the hash result (``HashResult``)
    """
    is_file = False
    if os.path.isdir(path):
        pass
    elif os.path.isfile(path):
        is_file = True
    else:
        return HashResult.BAD_PATH

    # load hashes
    try:
        hash_info = load(hash_file)
    except BadFileHashLoadError:
        return HashResult.BAD_PATH
    except BadFormatHashLoadError as e:
        if not quiet:
            err(
                '''\
hash file is not properly formatted

The hash file provided is incorrectly formatted. The hash file expects lines
with the hash type, hash and target file provided. For example:

    sha1 f572d396fae9206628714fb2ce00f72e94f2258f my-file

Please correct the following hash file:

    Hash File: {}
      Details: {}''', hash_file, e)

        return HashResult.BAD_FORMAT

    # no hash information
    if not hash_info:
        return HashResult.EMPTY

    # if this is a target file, filter out other hash entries
    if is_file:
        target = os.path.basename(path)
        path = os.path.abspath(os.path.join(path, os.pardir))
        hash_info = [x for x in hash_info if x[2] == target]
        if not hash_info:
            return HashResult.MISSING_ARCHIVE

    # filter out excluded assets (if any)
    if exclude:
        hash_info = [x for x in hash_info if x[2] not in exclude]
        if not hash_info:
            return HashResult.EMPTY

    hash_catalog = {}
    for type_, hash_, asset in hash_info:
        types = hash_catalog.setdefault(asset, {})
        types.setdefault(type_, []).append(hash_.lower())

    for asset, type_hashes in hash_catalog.items():
        hashers = {}
        for hash_entry in type_hashes.keys():
            # extract the specific hash type, if the entry includes a key length
            hash_type, _, _ = hash_entry.partition(':')

            hashers[hash_entry] = _get_hasher(hash_type)
            if not hashers[hash_entry]:
                if not quiet:
                    err(
                        '''\
unsupported hash type

The hash file defines a hash type not supported by the releng-tool. Officially
supported hash types are FIPS supported algorithms provided by the Python
interpreter (e.g. sha1, sha224, sha256, sha384, sha512). Other algorithms,
while unofficially supported, can be used if provided by the system's OpenSSL
library.

     Hash File: {}
 Provided Type: {}''', hash_file, hash_type)

                debug('unsupported hash type: {}', hash_type)
                return HashResult.UNSUPPORTED

        target_file = os.path.join(path, asset)
        try:
            with open(target_file, 'rb') as f:
                buf = f.read(HASH_READ_BLOCKSIZE)
                while buf:
                    for hasher in hashers.values():
                        hasher.update(buf)
                    buf = f.read(HASH_READ_BLOCKSIZE)
        except IOError:
            if not quiet:
                if relaxed:
                    warn('missing expected file for verification: ' + asset)
                else:
                    err(
                        '''\
missing expected file for verification

A defined hash entry cannot be verified since the target file does not exist.
Ensure the hash file correctly names an expected file.

    Hash File: {}
         File: {}''', hash_file, asset)

            return HashResult.MISSING_LISTED

        for hash_entry, hasher in hashers.items():
            _, _, hash_len = hash_entry.partition(':')
            if hash_len:
                digest = hasher.hexdigest(int(hash_len))
            else:
                digest = hasher.hexdigest()

            debug('calculated-hash: {} {}:{}', asset, hash_entry, digest)
            hashes = type_hashes[hash_entry]
            if digest not in hashes:
                if not quiet:
                    if relaxed:
                        warn('hash mismatch detected: ' + asset)
                    else:
                        provided = ''
                        for hash_ in hashes:
                            provided += '\n     Provided: {}'.format(hash_)

                        err(
                            '''\
hash mismatch detected

    Hash File: {}
         File: {}
     Detected: {}{}''', hash_file, asset, digest, provided)

                return HashResult.MISMATCH

    return HashResult.VERIFIED
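
The hash file consumed here follows the whitespace-separated three-column
format shown in the error text above. A minimal end-to-end sketch, assuming
``verify`` and ``HashResult`` are importable from this module; the file names
and digest are illustrative:

# my-package.hash (one entry per line: <type> <hash> <asset>):
#
#     sha256 9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08 test.txt

result = verify('my-package.hash', 'downloads/test.txt')
if result == HashResult.VERIFIED:
    print('archive verified')
elif result == HashResult.MISMATCH:
    print('hash mismatch; discarding download')
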
Example #11
    def load_package(self, name, script):
        """
        load a package definition

        Attempts to load a package definition of a given ``name`` from the
        provided ``script`` location. The script will be examined for required
        and optional configuration keys. On a successful execution/parsing, a
        package object will be returned along with other meta information. On
        error, ``None`` types are returned.

        Args:
            name: the package name
            script: the package script to load

        Returns:
            returns a tuple of three (3) containing the package instance, the
            extracted environment/globals from the package script and a list of
            known package dependencies

        Raises:
            RelengToolInvalidPackageConfiguration: when an error has been
                                                    detected loading the package
        """
        verbose('loading package: {}', name)
        debug('script {}', script)
        opts = self.opts

        if not os.path.isfile(script):
            raise RelengToolMissingPackageScript({
                'pkg_name': name,
                'script': script,
            })

        pkg_def_dir = os.path.abspath(os.path.join(script, os.pardir))
        self.script_env['PKG_DEFDIR'] = pkg_def_dir

        try:
            env = run_script(script, self.script_env, catch=False)
        except Exception as e:
            raise RelengToolInvalidPackageScript({
                'description': str(e),
                'script': script,
                'traceback': traceback.format_exc(),
            })

        self._active_package = name
        self._active_env = env

        # prepare helper expand values
        expand_extra = {}

        # version/revision extraction first
        #
        # Attempt to check the version first since it will be the most commonly
        # used package field -- better to initially fail on a simple field
        # (for new packages and/or developers) than to break on a possibly more
        # complex field below. Note that the version field is optional, in cases
        # where a package type does not need a version entry (e.g. sites which
        # do not require a version value for fetching, or there is no revision
        # value to use instead).
        #
        # Note that when in development mode, the development-mode revision
        # (if any is set) needs to be checked as well. This value may override
        # the package's version value.

        # version
        pkg_version = self._fetch(Rpk.VERSION)

        if not pkg_version:
            pkg_version = ''

        pkg_version_key = pkg_key(name, Rpk.VERSION)
        expand_extra[pkg_version_key] = pkg_version

        # development mode revision
        pkg_has_devmode_option = False
        pkg_devmode_revision = self._fetch(Rpk.DEVMODE_REVISION,
                                           allow_expand=True,
                                           expand_extra=expand_extra)

        if pkg_devmode_revision:
            pkg_has_devmode_option = True

            if opts.revision_override and name in opts.revision_override:
                pkg_devmode_revision = opts.revision_override[name]

            if opts.devmode:
                pkg_version = pkg_devmode_revision
                expand_extra[pkg_version_key] = pkg_version

        # revision
        if opts.revision_override and name in opts.revision_override:
            pkg_revision = opts.revision_override[name]
        else:
            pkg_revision = self._fetch(Rpk.REVISION,
                                       allow_expand=True,
                                       expand_extra=expand_extra)
        if opts.devmode and pkg_devmode_revision:
            pkg_revision = pkg_devmode_revision
        elif not pkg_revision:
            pkg_revision = pkg_version

        # site / vcs-site detection
        #
        # After extracting the required version information, the site / VCS type
        # needs to be checked next. This will allow the manager to detect early
        # if a version/revision field is required, and fail early if we have
        # not detected one above.

        # site
        if opts.sites_override and name in opts.sites_override:
            # Site overriding is permitted to help in scenarios where a builder
            # is unable to acquire a package's source from the defined site.
            # This includes firewall settings or a desire to use a mirrored
            # source when experiencing network connectivity issues.
            pkg_site = opts.sites_override[name]
        else:
            pkg_site = self._fetch(Rpk.SITE,
                                   allow_expand=True,
                                   expand_extra=expand_extra)

        # On Windows, if a file site is provided, ensure the path value is
        # converted to a posix-styled path, to prevent issues with `urlopen`
        # being provided an unescaped path string
        if sys.platform == 'win32' and \
                pkg_site and pkg_site.startswith('file://'):
            pkg_site = pkg_site[len('file://'):]
            abs_site = os.path.isabs(pkg_site)
            pkg_site = pkg_site.replace(os.sep, posixpath.sep)
            if abs_site:
                pkg_site = '/' + pkg_site
            pkg_site = 'file://' + pkg_site

        # vcs-type
        pkg_vcs_type = None
        pkg_vcs_type_raw = self._fetch(Rpk.VCS_TYPE)
        if pkg_vcs_type_raw:
            pkg_vcs_type_raw = pkg_vcs_type_raw.lower()
            if pkg_vcs_type_raw in VcsType:
                pkg_vcs_type = pkg_vcs_type_raw
            elif pkg_vcs_type_raw in self.registry.fetch_types:
                pkg_vcs_type = pkg_vcs_type_raw
            else:
                raise RelengToolUnknownVcsType({
                    'pkg_name': name,
                    'pkg_key': pkg_key(name, Rpk.VCS_TYPE),
                })

        if not pkg_vcs_type:
            if pkg_site:
                site_lc = pkg_site.lower()
                if site_lc.startswith('bzr+'):
                    pkg_site = pkg_site[4:]
                    pkg_vcs_type = VcsType.BZR
                elif site_lc.startswith('cvs+'):
                    pkg_site = pkg_site[4:]
                    pkg_vcs_type = VcsType.CVS
                elif site_lc.startswith((
                        ':ext:',
                        ':extssh:',
                        ':gserver:',
                        ':kserver:',
                        ':pserver:',
                )):
                    pkg_vcs_type = VcsType.CVS
                elif site_lc.startswith('git+'):
                    pkg_site = pkg_site[4:]
                    pkg_vcs_type = VcsType.GIT
                elif site_lc.endswith('.git'):
                    pkg_vcs_type = VcsType.GIT
                elif site_lc.startswith('hg+'):
                    pkg_site = pkg_site[3:]
                    pkg_vcs_type = VcsType.HG
                elif site_lc.startswith('rsync+'):
                    pkg_site = pkg_site[6:]
                    pkg_vcs_type = VcsType.RSYNC
                elif site_lc.startswith('scp+'):
                    pkg_site = pkg_site[4:]
                    pkg_vcs_type = VcsType.SCP
                elif site_lc.startswith('svn+'):
                    pkg_site = pkg_site[4:]
                    pkg_vcs_type = VcsType.SVN
                elif site_lc == 'local':
                    pkg_vcs_type = VcsType.LOCAL
                else:
                    pkg_vcs_type = VcsType.URL
            else:
                pkg_vcs_type = VcsType.NONE

        if pkg_vcs_type == VcsType.LOCAL:
            warn('package using local content: {}', name)

        # check if the detected vcs type needs a revision, and fail if we do
        # not have one
        if not pkg_revision and pkg_vcs_type in (
                VcsType.BZR,
                VcsType.CVS,
                VcsType.GIT,
                VcsType.HG,
                VcsType.SVN,
        ):
            raise RelengToolMissingPackageRevision({
                'pkg_name': name,
                'pkg_key1': pkg_key(name, Rpk.VERSION),
                'pkg_key2': pkg_key(name, Rpk.REVISION),
                'vcs_type': pkg_vcs_type,
            })

        # archive extraction strip count
        pkg_strip_count = self._fetch(Rpk.STRIP_COUNT,
                                      default=DEFAULT_STRIP_COUNT)

        # build subdirectory
        pkg_build_subdir = self._fetch(Rpk.BUILD_SUBDIR)

        # dependencies
        deps = self._fetch(Rpk.DEPS, default=[])

        # ignore cache
        pkg_devmode_ignore_cache = self._fetch(Rpk.DEVMODE_IGNORE_CACHE)

        # extension (override)
        pkg_filename_ext = self._fetch(Rpk.EXTENSION)

        # extract type
        pkg_extract_type = self._fetch(Rpk.EXTRACT_TYPE)
        if pkg_extract_type:
            pkg_extract_type = pkg_extract_type.lower()

            if pkg_extract_type not in self.registry.extract_types:
                raise RelengToolUnknownExtractType({
                    'pkg_name': name,
                    'pkg_key': pkg_key(name, Rpk.EXTRACT_TYPE),
                })

        # is-external
        pkg_is_external = self._fetch(Rpk.EXTERNAL)

        # is-internal
        pkg_is_internal = self._fetch(Rpk.INTERNAL)

        # no extraction
        pkg_no_extraction = self._fetch(Rpk.NO_EXTRACTION)

        # skip any remote configuration
        pkg_skip_remote_config = self._fetch(Rpk.SKIP_REMOTE_CONFIG)

        # skip any remote scripts
        pkg_skip_remote_scripts = self._fetch(Rpk.SKIP_REMOTE_SCRIPTS)

        # type
        pkg_type = None
        pkg_type_raw = self._fetch(Rpk.TYPE)
        if pkg_type_raw:
            pkg_type_raw = pkg_type_raw.lower()
            if pkg_type_raw in PackageType:
                pkg_type = pkg_type_raw
            elif pkg_type_raw in self.registry.package_types:
                pkg_type = pkg_type_raw
            else:
                raise RelengToolUnknownPackageType({
                    'pkg_name': name,
                    'pkg_key': pkg_key(name, Rpk.TYPE),
                })

        if not pkg_type:
            pkg_type = PackageType.SCRIPT

        # ######################################################################

        # git configuration options for a repository
        pkg_git_config = self._fetch(Rpk.GIT_CONFIG)

        # git-depth
        pkg_git_depth = self._fetch(Rpk.GIT_DEPTH)

        # git-refspecs
        pkg_git_refspecs = self._fetch(Rpk.GIT_REFSPECS)

        # git-submodules
        pkg_git_submodules = self._fetch(Rpk.GIT_SUBMODULES)

        # git-verify
        pkg_git_verify_revision = self._fetch(Rpk.GIT_VERIFY_REVISION)

        # ######################################################################

        # checks
        if pkg_is_external is not None and pkg_is_internal is not None:
            if pkg_is_external == pkg_is_internal:
                raise RelengToolConflictingConfiguration({
                    'pkg_name': name,
                    'pkg_key1': pkg_key(name, Rpk.EXTERNAL),
                    'pkg_key2': pkg_key(name, Rpk.INTERNAL),
                    'desc': 'package flagged as external and internal',
                })
        elif pkg_is_external is not None:
            pkg_is_internal = not pkg_is_external
        elif pkg_is_internal is not None:
            pass
        elif opts.default_internal_pkgs:
            pkg_is_internal = True
        else:
            pkg_is_internal = False

        # check a site is defined for vcs types which require it
        if not pkg_site and pkg_vcs_type in (
                VcsType.BZR,
                VcsType.CVS,
                VcsType.GIT,
                VcsType.HG,
                VcsType.RSYNC,
                VcsType.SCP,
                VcsType.SVN,
                VcsType.URL,
        ):
            raise RelengToolMissingPackageSite({
                'pkg_name': name,
                'pkg_key': pkg_key(name, Rpk.SITE),
                'vcs_type': pkg_vcs_type,
            })

        # list of supported dvcs types
        SUPPORTED_DVCS = [
            VcsType.GIT,
            VcsType.HG,
        ]
        is_pkg_dvcs = (pkg_vcs_type in SUPPORTED_DVCS)

        # find possible extension for a cache file
        #
        # non-dvcs types will always be gzip-tar'ed.
        if pkg_vcs_type in (
                VcsType.BZR,
                VcsType.CVS,
                VcsType.RSYNC,
                VcsType.SVN,
        ):
            cache_ext = 'tgz'
        # dvcs's will not have an extension type
        elif is_pkg_dvcs:
            cache_ext = None
        # non-vcs type does not have an extension
        elif pkg_vcs_type in (VcsType.LOCAL, VcsType.NONE):
            cache_ext = None
        else:
            cache_ext = None
            url_parts = urlparse(pkg_site)

            if opts.cache_ext_transform:
                # Allow a configuration to override the target cache file's
                # extension based on the package's site path (for unique path
                # scenarios).
                cache_ext = opts.cache_ext_transform(url_parts.path)

            if not cache_ext:
                if pkg_filename_ext:
                    cache_ext = pkg_filename_ext
                else:
                    basename = os.path.basename(url_parts.path)
                    __, cache_ext = interpret_stem_extension(basename)

        # prepare package container and directory locations
        #
        # The container folder for a package will typically be a combination of
        # a package's name plus version. If no version is set, the container
        # will only use the package's name. We try to use the version entry
        # when possible to help manage multiple versions of output (e.g. to
        # avoid conflicts when bumping versions).
        #
        # When the version value is used, we will attempt to cleanup/minimize
        # the version to help provide the container a more "sane" path. For
        # instance, if a version references a path-styled branch name (e.g.
        # `bugfix/my-bug`), we want to avoid promoting a container name which
        # can result in a sub-directory being made (e.g. `pkg-bugfix/my-bug/`).
        if pkg_version:
            pkg_nv = '{}-{}'.format(
                name, ''.join(x if (x.isalnum() or x in '-._') else '_'
                              for x in pkg_version))
        else:
            pkg_nv = name
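        # For example, a package `libfoo` with a path-styled version such as
        # `bugfix/my-bug` results in a container name of
        # `libfoo-bugfix_my-bug`, since every character that is not
        # alphanumeric or in `-._` is replaced with an underscore.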

        pkg_build_output_dir = os.path.join(opts.build_dir, pkg_nv)

        if pkg_vcs_type == VcsType.LOCAL:
            pkg_build_dir = pkg_def_dir
        else:
            pkg_build_dir = pkg_build_output_dir

        # check if an internal package is configured to point to a local
        # directory for sources
        pkg_local_srcs = False
        if pkg_is_internal and opts.local_srcs:
            # specific package name reference in the local sources; either is
            # set to the path to use, or is set to `None` to indicate that this
            # package should not be retrieved locally
            if name in opts.local_srcs:
                if opts.local_srcs[name]:
                    pkg_build_dir = opts.local_srcs[name]
                    pkg_local_srcs = True

            # check if the "global" local sources path exists; either set to
            # a specific path, or set to `None` to indicate that it will use
            # the parent path based off the root directory
            elif GBL_LSRCS in opts.local_srcs:
                if opts.local_srcs[GBL_LSRCS]:
                    container_dir = opts.local_srcs[GBL_LSRCS]
                else:
                    container_dir = os.path.dirname(opts.root_dir)

                pkg_build_dir = os.path.join(container_dir, name)
                pkg_local_srcs = True

            if pkg_build_dir == opts.root_dir:
                raise RelengToolConflictingLocalSrcsPath({
                    'pkg_name': name,
                    'root': opts.root_dir,
                    'path': pkg_build_dir,
                })
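            # For illustration: if `opts.local_srcs` maps `libfoo` to
            # `/opt/src/libfoo`, this package builds directly out of that path;
            # if only the global entry is set (to `None`), the package would
            # instead resolve to a `libfoo` folder beside the root directory.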

        if pkg_build_subdir:
            pkg_build_subdir = os.path.join(pkg_build_dir, pkg_build_subdir)

        cache_dir = os.path.join(opts.dl_dir, name)
        if cache_ext:
            pkg_cache_file = os.path.join(cache_dir, pkg_nv + '.' + cache_ext)
        else:
            pkg_cache_file = os.path.join(cache_dir, pkg_nv)

        # Select sources (like CMake-based projects) may wish to use
        # out-of-source tree builds. For supported project types, adjust the
        # build output directory to a sub-folder of the originally assumed
        # output folder.
        if pkg_type == PackageType.CMAKE:
            pkg_build_output_dir = os.path.join(pkg_build_output_dir,
                                                'releng-output')
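        # For example, a CMake-based package `libfoo` at version `1.0` would
        # extract into `<build-dir>/libfoo-1.0` while its build output lands
        # in `<build-dir>/libfoo-1.0/releng-output`, keeping generated
        # artifacts out of the extracted source tree.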

        # determine the build tree for a package
        #
        # A build tree (introduced for the libfoo-exec action) tracks the
        # directory where build commands would typically be executed for a
        # package on a host system. In most cases, this will be set to the
        # same path as `pkg_build_dir` (or the sub-directory, if provided);
        # however, some package types may have a better working directory
        # for build commands. For example, CMake projects will generate a
        # build package in an out-of-source directory (e.g.
        # `pkg_build_output_dir`), which is a better place to issue commands
        # such as "cmake --build .".
        if pkg_type == PackageType.CMAKE:
            pkg_build_tree = pkg_build_output_dir
        elif pkg_build_subdir:
            pkg_build_tree = pkg_build_subdir
        else:
            pkg_build_tree = pkg_build_dir

        # determine the cache directory for this package
        #
        # Typically, a package's "cache directory" will be stored in the output
        # folder's "cache/<pkg-name>" path. However, having package-name driven
        # cache folder targets does not provide an easy way to manage sharing
        # caches between projects if they share the same content (either the
        # same site or sharing submodules). Cache targets for packages will be
        # stored in a database and can be used here to decide if a package's
        # cache will actually be stored in a different container.
        pkg_cache_dir = os.path.join(opts.cache_dir, name)
        if is_pkg_dvcs:
            ckey = pkg_cache_key(pkg_site)

            pkg_cache_dirname = name

            # if the default cache directory exists, always prioritize it (and
            # force update the cache location)
            if os.path.exists(pkg_cache_dir):
                self._dvcs_cache[name] = name
            # if the cache content is stored in another container, use it
            elif ckey in self._dvcs_cache:
                pkg_cache_dirname = self._dvcs_cache[ckey]
                verbose('alternative cache path for package: {} -> {}', name,
                        pkg_cache_dirname)

            # track ckey entry to point to our cache container
            #
            # This package's "ckey" will be used to cache the target folder
            # being used for this package, so other packages with matching site
            # values could use it. In the rare case that the "ckey" entry
            # already exists but is pointing to a folder other than our target
            # one, leave it as is (assume ownership of the key is managed by
            # another package).
            if ckey not in self._dvcs_cache:
                self._dvcs_cache[ckey] = pkg_cache_dirname

            # adjust the cache directory and save any new cache changes
            pkg_cache_dir = os.path.join(opts.cache_dir, pkg_cache_dirname)
            self._save_dvcs_cache()
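        # For illustration (a hedged example): if two packages share the same
        # Git site, the first one processed claims the cache key; the second
        # then resolves its cache directory to the first package's container,
        # avoiding a duplicate clone of the same repository.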

        # (commons)
        pkg = RelengPackage(name, pkg_version)
        pkg.asc_file = os.path.join(pkg_def_dir, name + '.asc')
        pkg.build_dir = pkg_build_dir
        pkg.build_output_dir = pkg_build_output_dir
        pkg.build_subdir = pkg_build_subdir
        pkg.build_tree = pkg_build_tree
        pkg.cache_dir = pkg_cache_dir
        pkg.cache_file = pkg_cache_file
        pkg.def_dir = pkg_def_dir
        pkg.devmode_ignore_cache = pkg_devmode_ignore_cache
        pkg.extract_type = pkg_extract_type
        pkg.git_config = pkg_git_config
        pkg.git_depth = pkg_git_depth
        pkg.git_refspecs = pkg_git_refspecs
        pkg.git_submodules = pkg_git_submodules
        pkg.git_verify_revision = pkg_git_verify_revision
        pkg.has_devmode_option = pkg_has_devmode_option
        pkg.hash_file = os.path.join(pkg_def_dir, name + '.hash')
        pkg.is_internal = pkg_is_internal
        pkg.local_srcs = pkg_local_srcs
        pkg.no_extraction = pkg_no_extraction
        pkg.revision = pkg_revision
        pkg.site = pkg_site
        pkg.skip_remote_config = pkg_skip_remote_config
        pkg.skip_remote_scripts = pkg_skip_remote_scripts
        pkg.strip_count = pkg_strip_count
        pkg.type = pkg_type
        pkg.vcs_type = pkg_vcs_type

        self._apply_postinit_options(pkg)

        # (additional environment helpers)
        for env_dict in (os.environ, env):
            env_dict[pkg_key(name, 'BUILD_DIR')] = pkg_build_dir
            env_dict[pkg_key(name, 'BUILD_OUTPUT_DIR')] = pkg_build_output_dir
            env_dict[pkg_key(name, 'DEFDIR')] = pkg_def_dir
            env_dict[pkg_key(name, 'NAME')] = name
            env_dict[pkg_key(name, 'REVISION')] = pkg_revision
        os.environ[pkg_key(name, Rpk.VERSION)] = pkg_version

        # (internals)
        prefix = '.releng_tool-stage-'
        outdir = pkg.build_output_dir
        pkg._ff_bootstrap = os.path.join(outdir, prefix + 'bootstrap')
        pkg._ff_build = os.path.join(outdir, prefix + 'build')
        pkg._ff_configure = os.path.join(outdir, prefix + 'configure')
        pkg._ff_extract = os.path.join(outdir, prefix + 'extract')
        pkg._ff_install = os.path.join(outdir, prefix + 'install')
        pkg._ff_license = os.path.join(outdir, prefix + 'license')
        pkg._ff_patch = os.path.join(outdir, prefix + 'patch')
        pkg._ff_post = os.path.join(outdir, prefix + 'post')

        # dump package attributes if running in a debug mode
        if opts.debug:
            info = {}
            for key, value in pkg.__dict__.items():
                if not key.startswith('_'):
                    info[key] = value

            debug(
                '''package-data: {}
==============================
{}
==============================''', name, pprint.pformat(info))

        return pkg, env, deps
Ejemplo n.º 12
0
def main():
    """
    mainline

    The mainline for the releng tool.

    Returns:
        the exit code
    """
    retval = 1

    try:
        parser = argparse.ArgumentParser(prog='releng-tool',
                                         add_help=False,
                                         usage=usage())

        parser.add_argument('--assets-dir')
        parser.add_argument('--cache-dir')
        parser.add_argument('--config')
        parser.add_argument('--debug', action='store_true')
        parser.add_argument('--development', '-D', action='store_true')
        parser.add_argument('--dl-dir')
        parser.add_argument('--force', '-F', action='store_true')
        parser.add_argument('--help', '-h', action='store_true')
        parser.add_argument('--help-quirks', action='store_true')
        parser.add_argument('--images-dir')
        parser.add_argument('--jobs',
                            '-j',
                            default=0,
                            type=type_nonnegativeint)
        parser.add_argument('--local-sources',
                            '-L',
                            nargs='?',
                            action='append')
        parser.add_argument('--nocolorout', action='store_true')
        parser.add_argument('--out-dir')
        parser.add_argument('--root-dir')
        parser.add_argument('--quirk', action='append')
        parser.add_argument('--verbose', '-V', action='store_true')
        parser.add_argument('--version',
                            action='version',
                            version='%(prog)s ' + releng_version)

        known_args = sys.argv[1:]
        forward_args = []
        idx = known_args.index('--') if '--' in known_args else -1
        if idx != -1:
            forward_args = known_args[idx + 1:]
            known_args = known_args[:idx]
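        # For example, an invocation such as `releng-tool libfoo-exec -- make`
        # keeps `libfoo-exec` in the arguments to parse, while `make` is
        # captured as a forwarded argument.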

        args, unknown_args = parser.parse_known_args(known_args)
        if args.help:
            print(usage())
            sys.exit(0)
        if args.help_quirks:
            print(usage_quirks())
            sys.exit(0)

        # force verbose messages if debugging is enabled
        if args.debug:
            args.verbose = True

        releng_log_configuration(args.debug, args.nocolorout, args.verbose)

        # toggle on ansi colors by default for commands
        if not args.nocolorout:
            os.environ['CLICOLOR_FORCE'] = '1'

            # support character sequences (for color output on win32 cmd)
            if sys.platform == 'win32':
                enable_ansi_win32()

        verbose('releng-tool {}', releng_version)
        debug('({})', __file__)

        # extract additional argument information:
        #  - pull the action value
        #  - pull "exec" command (if applicable)
        #  - key-value entries to be injected into the running
        #     script/working environment
        new_args, unknown_args = process_args(unknown_args)
        args.action = new_args['action']
        args.action_exec = new_args['exec']
        args.injected_kv = new_args['entries']

        # register any injected entry into the working environment right away
        for k, v in args.injected_kv.items():
            os.environ[k] = v
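        # For illustration (assuming key-value entries are provided in a
        # `KEY=value` form on the command line): an injected entry of
        # `MY_OPTION=1` would surface as `os.environ['MY_OPTION'] == '1'` for
        # scripts invoked by the engine.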

        if unknown_args:
            warn('unknown arguments: {}', ' '.join(unknown_args))

        if forward_args:
            debug('forwarded arguments: {}', ' '.join(forward_args))

        # warn if the *nix-based system is running as root; ill-formed projects
        # may attempt to modify the local system's root filesystem
        if sys.platform != 'win32':
            if os.geteuid() == 0:  # pylint: disable=E1101
                if 'RELENG_IGNORE_RUNNING_AS_ROOT' not in os.environ:
                    # attempt to check if we are in a container; if so, ignore
                    # generating a warning -- we will check if kernel threads
                    # are running as pid 2; if not, it is most likely that we
                    # are in a container environment; checks for a container
                    # do not have to be perfect here, only to try to help
                    # improve a user's experience (suppressing this warning
                    # when not running on a typical host setup)
                    try:
                        with open('/proc/2/status') as f:
                            inside_container = 'kthreadd' not in f.read()
                    except IOError:
                        inside_container = True

                    if not inside_container:
                        warn('running as root; this may be unsafe')

        # prepare engine options
        opts = RelengEngineOptions(args=args, forward_args=forward_args)

        # create and start the engine
        engine = RelengEngine(opts)
        try:
            if engine.run():
                retval = 0
        except RelengToolException as e:
            err(e)
    except KeyboardInterrupt:
        print('')

    return retval
Ejemplo n.º 13
0
def fetch(opts):
    """
    support fetching from bzr sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the cache file containing the fetched content if the fetch stage is
        completed; ``None`` otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site

    if not BZR.exists():
        err('unable to fetch package; bzr is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    export_opts = [
        'export',
        cache_file,
        site,
        '--format=tgz',
        '--root=' + name,
        '--revision=' + revision,
    ]
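    # For illustration (a hedged example): for a package named `libfoo` pinned
    # to revision `42`, the options above correspond roughly to running:
    #
    #     bzr export <cache-file> <site> --format=tgz --root=libfoo --revision=42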

    # some environments may have issues exporting bzr sources due to
    # certificate problems; this quirk allows injecting certifi-provided
    # certificates for all bzr exports
    if 'releng.bzr.certifi' in opts._quirks:
        global CERTIFI_MISSING_WARNED

        if certifi:
            verbose('performing bzr fetch with certifi certificates')
            ca_certs = certifi.where()
            export_opts.append('-Ossl.ca_certs=' + ca_certs)
        elif not CERTIFI_MISSING_WARNED:
            CERTIFI_MISSING_WARNED = True
            warn('''\
unable to perform bzr fetch with certifi certificates

A quirk has been enabled to export bzr sources using certifi
certificates; however, certifi is not installed on this system.
''')

    log('exporting sources')
    if not BZR.execute(export_opts, poll=True):
        err('unable to export module')
        return None

    return cache_file
Ejemplo n.º 14
0
    def load(self, name, ignore=True):
        """
        load the provided extension into the registry

        Attempts to load an extension with the provided name value. If an
        extension which is already loaded in the registry is provided, the
        request to load the specific extension is ignored. If an extension could
        not be loaded, a warning is generated and this method will return
        ``False``.

        Args:
            name: name of the extension to load
            ignore (optional): ignore if the detected extension could not be
                                loaded (default: True)

        Returns:
            whether or not the extension was loaded in the registry
        """

        # ignore if extension is already loaded
        if name in self.extension:
            return True

        loaded = False
        debug('attempting to load extension: {}', name)
        try:
            try:
                plugin = import_module(name)
            except RelengModuleNotFoundError:
                # python 2.7 may not be able to load from a nested path; try
                # searching through each package (if a nested module)
                if sys.version_info >= (3, 0) or '.' not in name:
                    raise

                # split the module into parts and for each part, check to see
                # if it's a package directory; if so, keep going until the last
                # namespace package
                ext_parts = name.split('.')
                path = None
                for part in ext_parts[:-1]:
                    file, pathname, desc = imp.find_module(part, path)

                    if desc[-1] != imp.PKG_DIRECTORY:
                        raise ImportError(name)

                    pkg = imp.load_module(part, file, pathname, desc)
                    path = pkg.__path__

                # with the path of the last namespace package found, find the
                # desired module in this path
                last_part = ext_parts[-1]
                file, pathname, desc = imp.find_module(last_part, path)
                plugin = imp.load_module(last_part, file, pathname, desc)
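                # For illustration (hypothetical name): loading an extension
                # called `acme.releng.ext` on Python 2.7 walks `acme` and then
                # `acme.releng` as package directories before loading the final
                # `ext` module from the resolved package path.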

            if hasattr(plugin, 'releng_setup'):
                if not ignore:
                    plugin.releng_setup(self)
                    loaded = True
                else:
                    try:
                        plugin.releng_setup(self)
                        loaded = True
                    except RelengInvalidSetupException as e:
                        warn(
                            'extension is not supported '
                            'due to an invalid setup: {}\n'
                            ' ({})', name, e)
                    except RelengVersionNotSupportedException as e:
                        warn(
                            'extension is not supported '
                            'with this version: {}\n'
                            ' ({})', name, e)

                if loaded:
                    self.extension.append(name)
                    verbose('loaded extension: {}', name)
            else:
                warn('extension does not have a setup method: {}', name)
        except RelengModuleNotFoundError:
            warn('unable to find extension: {}', name)

        return loaded
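    # A minimal sketch of an extension module this registry could load; only
    # the `releng_setup` hook name is taken from the code above, the body is
    # an assumption for illustration:
    #
    #     def releng_setup(app):
    #         # `app` is the registry instance; raising
    #         # RelengVersionNotSupportedException or
    #         # RelengInvalidSetupException here is reported as a warning
    #         # when `ignore` is enabled
    #         pass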
Ejemplo n.º 15
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2020 releng-tool

from releng_tool.util.log import warn
from releng_tool.__main__ import main
import sys

if __name__ == '__main__':
    warn('(deprecated) the releng module is deprecated in favor of releng_tool')
    sys.exit(main())