Example No. 1
    def save(self, desc=None):
        """
        save statistics for future reference

        Will save any statistics which should be persisted for future
        consideration. This helps render a "complete" report of statistics
        when re-running releng-tool with packages which may have already been
        completed.

        Args:
            desc (optional): description of this save event (for logging)
        """

        if not ensure_dir_exists(self.out_dir):
            verbose('unable to generate output directory for statistics')
            return None

        if desc:
            desc = ' ({})'.format(desc)
        else:
            desc = ''

        try:
            with open(self.dat_file, 'wb') as f:
                pickle.dump(self.data, f, protocol=2)  # 2 for py2/py3 support
            debug('saved statistics' + desc)
        except IOError:
            verbose('failed to save statistics' + desc)
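A hedged counterpart sketch (not part of the example above): statistics persisted this way can be restored on a later run by unpickling the same file. The load_saved_statistics helper and its empty-dict fallback are illustrative assumptions.

import os
import pickle

def load_saved_statistics(dat_file):
    # hypothetical loader for data written by save() above; falls back to
    # an empty dict when no prior statistics file exists or it is unreadable
    if not os.path.isfile(dat_file):
        return {}
    try:
        with open(dat_file, 'rb') as f:
            return pickle.load(f)
    except (IOError, pickle.UnpicklingError):
        return {}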
Example No. 2
    def process(self, pkg):
        """
        process a provided package

        This request will process a package through the various stages (if these
        stages are applicable to the current run state and have not yet been
        executed from a previous run). A package-specific script environment
        will be prepared and a package will go through the process of:

        - Extraction
        - Patching
        - License management (if needed)
        - Bootstrapping
        - Configuration
        - Building
        - Installing
        - Post-processing

        Args:
            pkg: the package to process

        Returns:
            returns whether or not the pipeline should continue processing
        """

        gaction = self.opts.gbl_action
        paction = self.opts.pkg_action
        target = self.opts.target_action

        # skip if generating license information and no license
        # files exist for this package
        if gaction == GlobalAction.LICENSES and not pkg.license_files:
            return True

        # extracting
        flag = pkg._ff_extract
        if check_file_flag(flag) == FileFlag.NO_EXIST:
            self.engine.stats.track_duration_start(pkg.name, 'extract')
            # none/local-vcs-type packages do not need to fetch
            if pkg.vcs_type in (VcsType.LOCAL, VcsType.NONE):
                pass
            elif not extract_stage(self.engine, pkg):
                raise RelengToolExtractionStageFailure
            # now that the extraction stage has (most likely)
            # created a build directory, ensure the output directory
            # exists as well (for file flags and other content)
            if not ensure_dir_exists(pkg.build_output_dir):
                raise RelengToolExtractionStageFailure
            self.engine.stats.track_duration_end(pkg.name, 'extract')
            if process_file_flag(True, flag) != FileFlag.CONFIGURED:
                return False
        if gaction == GlobalAction.EXTRACT:
            return True
        if paction == PkgAction.EXTRACT and pkg.name == target:
            return False

        # process the package data with a package-specific environment
        with self._stage_env(pkg) as pkg_env:
            return self._process_data(pkg, pkg_env)
Example No. 3
    def _dummy_pkg(self, container, pkg=None):
        uid = uuid.uuid4().hex

        if pkg:
            container = os.path.join(container, pkg)
        assert ensure_dir_exists(container)

        metadata = os.path.join(container, 'metadata')
        with open(metadata, 'w') as f:
            f.write(uid)

        return uid
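A hedged usage sketch for this test helper; cache_dir and the package name are illustrative assumptions.

        # hypothetical test usage: create a dummy package and confirm its
        # metadata file landed in the expected container
        uid = self._dummy_pkg(cache_dir, 'pkg-a')
        metadata = os.path.join(cache_dir, 'pkg-a', 'metadata')
        with open(metadata) as f:
            self.assertEqual(f.read(), uid)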
Example No. 4
def _fetch_submodule(opts, name, cache_dir, revision, site):
    """
    fetch a submodule into a provided cache/bare repository

    Fetches an individual submodule into the provided cache directory. The
    origin of the submodule is provided via the ``site`` argument. A revision,
    if provided, can be used to help verify the target revision desired for a
    submodule; however, it is not required (e.g. when a repository does not set
    an explicit submodule revision).

    Args:
        opts: fetch options
        name: the name of the submodule (for state messages)
        cache_dir: the cache/bare repository to fetch into
        revision: the revision (branch, tag, hash) to fetch
        site: the site to fetch the submodule from

    Returns:
        ``True`` if the submodule has been fetched; ``False`` otherwise
    """
    git_dir = '--git-dir=' + cache_dir

    # check if we have the target revision cached; if so, submodule is ready
    if os.path.isdir(cache_dir) and not opts.ignore_cache:
        if not revision:
            return _sync_git_origin(cache_dir, site)

        if revision_exists(git_dir, revision) in REVISION_EXISTS:
            return _sync_git_origin(cache_dir, site)

    log('processing submodule (package: {}) {}...', opts.name, name)
    sys.stdout.flush()

    # validate any cache directory (if one exists)
    has_cache, bad_validation = _validate_cache(cache_dir)
    if bad_validation:
        return False

    # if we have no cache for this repository, build one
    if not has_cache:
        if not ensure_dir_exists(cache_dir):
            return False

        if not _create_bare_git_repo(cache_dir):
            return False

    # ensure configuration is properly synchronized
    if not _sync_git_origin(cache_dir, site):
        return False

    # fetch sources for this submodule
    desc = 'submodule ({}): {}'.format(opts.name, name)
    return _fetch_srcs(opts, cache_dir, revision, desc=desc)
Example No. 5
    def generate(self):
        """
        generate a final report of statistics

        To be invoked at the end of a releng-tool process, this call will
        generate reports/etc. for any tracked statistics information based on
        the current and previous invoked executions (if any).
        """
        if not ensure_dir_exists(self.out_dir):
            verbose('unable to generate output directory for statistics')
            return None

        self._generate_duration()
Example No. 6
def fetch(opts):
    """
    support fetching from svn sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the cache file if the fetch stage is completed; ``None`` otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site
    work_dir = opts.work_dir

    if not SVN.exists():
        err('unable to fetch package; svn is not installed')
        return None

    note('fetching {}...'.format(name))
    sys.stdout.flush()

    log('checking out sources')
    if not SVN.execute(['checkout', '-r', revision, site, work_dir],
                       cwd=work_dir):
        err('unable to checkout module')
        return None

    log('caching sources')

    def svn_filter(info):
        # drop .svn metadata entries when building the archive
        if info.name.endswith('.svn'):
            return None
        return info

    # ensure cache file's directory exists
    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    with tarfile.open(cache_file, 'w:gz') as tar:
        tar.add(work_dir, arcname=name, filter=svn_filter)

    return cache_file
Example No. 7
    def test_utilio_ensuredirexists(self):
        with prepare_workdir() as work_dir:
            result = ensure_dir_exists(work_dir)
            self.assertTrue(result)

            new_dir = os.path.join(work_dir, 'test1')
            self.assertFalse(os.path.exists(new_dir))

            result = ensure_dir_exists(new_dir)
            self.assertTrue(result)
            self.assertTrue(os.path.exists(new_dir))

            new_file = os.path.join(work_dir, 'test2')
            with open(new_file, 'ab'):
                pass
            self.assertTrue(os.path.isfile(new_file))

            result = ensure_dir_exists(new_file)
            self.assertFalse(result)
            self.assertTrue(os.path.isfile(new_file))

            with self.assertRaises(SystemExit):
                ensure_dir_exists(new_file, critical=True)
            self.assertTrue(os.path.isfile(new_file))
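The test above pins down the contract for ensure_dir_exists: an existing directory yields ``True``, a missing directory is created, a path occupied by a file yields ``False``, and critical=True escalates failure to a SystemExit. A minimal sketch matching that contract (the real releng-tool implementation may differ) could look like:

import os
import sys

def ensure_dir_exists(dir_, quiet=False, critical=False):
    # sketch only: create the directory (and any parents) if needed,
    # reporting failure when the path exists but is not a directory
    try:
        if not os.path.isdir(dir_):
            os.makedirs(dir_)
        return True
    except OSError as e:
        if not quiet:
            print('unable to create directory: {}\n    {}'.format(dir_, e),
                  file=sys.stderr)
        if critical:
            sys.exit(-1)
        return False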
Example No. 8
    def _save_dvcs_cache(self):
        """
        save dvcs cache information

        Will save any DVCS cache information which future runs of releng-tool
        can use to hint where package cache data is stored.
        """

        if not self._dvcs_cache_enabled:
            return

        if not ensure_dir_exists(self.opts.cache_dir):
            verbose('unable to generate output directory for dvcs cache')
            return

        try:
            with open(self._dvcs_cache_fname, 'wb') as f:
                pickle.dump(self._dvcs_cache, f,
                            protocol=2)  # 2 for py2/py3 support
            debug('saved dvcs cache')
        except IOError:
            verbose('failed to save dvcs cache')
Example No. 9
def _copy_tree(src_folder, dst_folder, quiet=False, critical=True):
    if not ensure_dir_exists(dst_folder, quiet=quiet, critical=critical):
        return False

    for entry in os.listdir(src_folder):
        src = os.path.join(src_folder, entry)
        dst = os.path.join(dst_folder, entry)

        if os.path.islink(src):
            target = os.readlink(src)
            if os.path.islink(dst) or os.path.isfile(dst):
                path_remove(dst)

            os.symlink(target, dst)
            _copystat(src, dst)
        elif os.path.isdir(src):
            _copy_tree(src, dst, quiet=quiet, critical=critical)
        else:
            _copyfile(src, dst)
            _copystat(src, dst)

    _copystat(src_folder, dst_folder)

    return True
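Unlike shutil.copytree, the helper above merges into an existing destination and recreates symlinks instead of following them. A small hedged demonstration (paths are hypothetical):

import os

# set up a source tree containing a relative symlink
os.makedirs('src_tree', exist_ok=True)
with open(os.path.join('src_tree', 'a.txt'), 'w') as f:
    f.write('data')
if not os.path.islink(os.path.join('src_tree', 'link.txt')):
    os.symlink('a.txt', os.path.join('src_tree', 'link.txt'))

# the copied tree should carry the symlink over as a symlink
_copy_tree('src_tree', 'dst_tree')
assert os.path.islink(os.path.join('dst_tree', 'link.txt'))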
Example No. 10
    def _stage_license(self, pkg):
        """
        process license files for a specific package

        If a package contains one or more files containing license information,
        this information will be copied into the package's license folder.

        Args:
            pkg: the package being processed

        Returns:
            ``True`` if the license information was copied; ``False`` if the
            license information could not be copied
        """

        # skip if package has no license files
        if not pkg.license_files:
            if pkg.license and not pkg.is_internal and not pkg.no_extraction:
                warn('package defines no license files: ' + pkg.name)
            return True

        # ensure package-specific license directory exists
        pkg_license_dir = os.path.join(self.opts.license_dir, pkg.nv)
        if not ensure_dir_exists(pkg_license_dir):
            return False

        # copy over each license file
        for file in pkg.license_files:
            src = os.path.join(pkg.build_dir, file)
            dst = os.path.join(pkg_license_dir, file)

            if not path_copy(src, dst, critical=False):
                err('unable to copy license information: ' + pkg.name)
                return False

        return True
Example No. 11
def initialize_sample(opts):
    """
    initialize a sample project

    Generates a sample project in the root directory to help new users or new
    projects get started.

    Args:
        opts: options for this run

    Returns:
        ``True`` if the sample project could be initialized; ``False`` if an
        issue has occurred generating the sample project
    """

    root_dir = opts.root_dir

    if not ensure_dir_exists(root_dir):
        return False

    if os.listdir(root_dir):
        err('unable to initialize sample project in a non-empty directory')
        return False

    sample_dir = os.path.join(root_dir, 'package', 'sample')

    success = True
    if ensure_dir_exists(sample_dir):
        # sample project
        sample_defs = os.path.join(root_dir, 'package', 'sample', 'sample')
        try:
            with open(sample_defs, 'w') as f:
                f.write('''\
#!/usr/bin/env python
# -*- coding: utf-8 -*-

SAMPLE_DEPENDENCIES = []
SAMPLE_LICENSE = ['<license name>']
SAMPLE_LICENSE_FILES = ['<license file>']
SAMPLE_SITE = '<location for sources>'
SAMPLE_TYPE = '<package-type>'
SAMPLE_VERSION = '<package-version>'
''')

            verbose('written sample file')
        except IOError as e:
            err('unable to generate a sample file')
            verbose(str(e))
            success = False
    else:
        success = False

    # .gitignore
    try:
        project_gitignore = os.path.join(root_dir,
                                         '.gitignore')  # (assumption)
        with open(project_gitignore, 'w') as f:
            f.write('''\
# releng-tool
/cache/
/dl/
/output/
.releng-flag-*
''')

        verbose('written .gitignore file')
    except IOError as e:
        err('unable to generate a .gitignore file')
        verbose(str(e))
        success = False

    # releng project
    try:
        project_defs = os.path.join(root_dir, 'releng')
        with open(project_defs, 'w') as f:
            f.write('''\
#!/usr/bin/env python
# -*- coding: utf-8 -*-

packages = [
    'sample',
]
''')

        verbose('written releng file')
    except IOError as e:
        err('unable to generate a releng file')
        verbose(str(e))
        success = False

    if success:
        log('initialized empty releng-tool project')
    else:
        warn('partially initialized a releng-tool project')
    return success
Example No. 12
def path_copy(src, dst, quiet=False, critical=True, dst_dir=None):
    """
    copy a file or directory into a target file or directory

    This call will attempt to copy a provided file or directory, defined by
    ``src`` into a destination file or directory defined by ``dst``. If ``src``
    is a file, then ``dst`` is considered to be a file or directory; if ``src``
    is a directory, ``dst`` is considered a target directory. If a target
    directory or target file's directory does not exist, it will be
    automatically created. In the event that a file or directory could not be
    copied, an error message will be output to standard error (unless ``quiet``
    is set to ``True``). If ``critical`` is set to ``True`` and the specified
    file/directory could not be copied for any reason, this call will issue a
    system exit (``SystemExit``).

    An example when using in the context of script helpers is as follows:

    .. code-block:: python

        # (stage)
        # my-file
        releng_copy('my-file', 'my-file2')
        # (stage)
        # my-file
        # my-file2
        releng_copy('my-file', 'my-directory/')
        # (stage)
        # my-directory/my-file
        # my-file
        # my-file2
        releng_copy('my-directory/', 'my-directory2/')
        # (stage)
        # my-directory/my-file
        # my-directory2/my-file
        # my-file
        # my-file2

    Args:
        src: the source directory or file
        dst: the destination directory or file\\* (\\*if ``src`` is a file)
        quiet (optional): whether or not to suppress output
        critical (optional): whether or not to stop execution on failure
        dst_dir (optional): force hint that the destination is a directory

    Returns:
        ``True`` if the copy has completed with no error; ``False`` if the copy
        has failed

    Raises:
        SystemExit: if the copy operation fails with ``critical=True``
    """
    success = False
    errmsg = None

    try:
        if os.path.isfile(src):
            attempt_copy = True

            if dst_dir:
                base_dir = dst
            else:
                base_dir = os.path.dirname(dst)

            if not os.path.isdir(base_dir):
                attempt_copy = ensure_dir_exists(base_dir, quiet=quiet)

            if attempt_copy:
                if os.path.isdir(dst):
                    dst = os.path.join(dst, os.path.basename(src))

                if os.path.islink(src):
                    target = os.readlink(src)
                    if os.path.islink(dst) or os.path.isfile(dst):
                        path_remove(dst)

                    os.symlink(target, dst)
                else:
                    _copyfile(src, dst)

                _copystat(src, dst)
                success = True
        elif os.path.exists(src):
            if src == dst:
                errmsg = "'{!s}' and '{!s}' " \
                         "are the same folder".format(src, dst)
            elif _copy_tree(src, dst, quiet=quiet, critical=critical):
                success = True
        else:
            errmsg = 'source does not exist: {}'.format(src)
    except (IOError, ShutilError) as e:
        errmsg = str(e)

    if not quiet and errmsg:
        err('unable to copy source contents to target location\n'
            '    {}', errmsg)

    if not success and critical:
        sys.exit(-1)
    return success
Example No. 13
def fetch(opts):
    """
    support fetching from mercurial sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the cache directory if the fetch stage is completed; ``None`` otherwise
    """

    assert opts
    cache_dir = opts.cache_dir
    name = opts.name
    revision = opts.revision
    site = opts.site

    if not HG.exists():
        err('unable to fetch package; hg (mercurial) is not installed')
        return None

    hg_dir = ['--repository', cache_dir]

    # check if we have the target revision; if so, full stop
    if os.path.isdir(cache_dir) and not opts.ignore_cache:
        if HG.execute(hg_dir + ['--quiet', 'log', '--rev', revision],
                      cwd=cache_dir,
                      quiet=True):
            return cache_dir

    note('fetching {}...', name)
    sys.stdout.flush()

    # if we have no cache for this repository, build one
    if not os.path.isdir(cache_dir):
        if not ensure_dir_exists(cache_dir):
            return None

        clone_args = ['--noninteractive', '--verbose', 'clone',
                      '--noupdate', site, cache_dir]
        if not HG.execute(clone_args, cwd=cache_dir):
            err('unable to clone mercurial repository')
            return None

    log('fetching most recent sources')
    if not HG.execute(hg_dir + ['--noninteractive', '--verbose', 'pull'],
                      cwd=cache_dir):
        err('unable to fetch from remote repository')
        return None

    log('verifying target revision exists')
    if not HG.execute(hg_dir + ['--quiet', 'log', '--rev', revision],
                      cwd=cache_dir,
                      quiet=True):
        err(
            'unable to find matching revision in repository: {}\n'
            ' (revision: {})', name, revision)
        return None

    return cache_dir
Example No. 14
def fetch(opts):
    """
    support fetching from git sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the cache directory if the fetch stage is completed; ``None`` otherwise
    """

    assert opts
    cache_dir = opts.cache_dir
    name = opts.name
    revision = opts.revision

    if not GIT.exists():
        err('unable to fetch package; git is not installed')
        return None

    git_dir = '--git-dir=' + cache_dir

    # check if we have the target revision cached; if so, package is ready
    if os.path.isdir(cache_dir) and not opts.ignore_cache:
        erv = revision_exists(git_dir, revision)
        if erv in REVISION_EXISTS:
            # ensure configuration is properly synchronized
            if not _sync_git_configuration(opts):
                return None

            # if no explicit ignore-cache request and if the revision is a
            # branch, force ignore-cache on and allow fetching to proceed
            if opts.ignore_cache is None and erv == GitExistsType.EXISTS_BRANCH:
                opts.ignore_cache = True
            # return cache dir if not verifying or verification succeeds
            elif not opts._git_verify_revision or _verify_revision(
                    git_dir, revision, quiet=True):
                return cache_dir

    note('fetching {}...', name)
    sys.stdout.flush()

    # validate any cache directory (if one exists)
    has_cache, bad_validation = _validate_cache(cache_dir)
    if bad_validation:
        return None

    # if we have no cache for this repository, build one
    if not has_cache:
        if not ensure_dir_exists(cache_dir):
            return None

        if not _create_bare_git_repo(cache_dir):
            return None

    # ensure configuration is properly synchronized
    if not _sync_git_configuration(opts):
        return None

    # fetch sources for this repository
    if not _fetch_srcs(opts, cache_dir, revision, refspecs=opts._git_refspecs):
        return None

    # verify revision (if configured to check it)
    if opts._git_verify_revision:
        if not _verify_revision(git_dir, revision):
            err(
                '''\
failed to validate git revision

Package has been configured to require the verification of the GPG signature
for the target revision. The verification has failed. Ensure that the revision
is signed and that the package's public key has been registered in the system.

      Package: {}
     Revision: {}''', name, revision)
            return None

    # fetch submodules (if configured to do so)
    if opts._git_submodules:
        if not _fetch_submodules(opts, cache_dir, revision):
            return None

    return cache_dir
Example No. 15
def fetch(opts):
    """
    support fetching from bzr sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the cache file if the fetch stage is completed; ``None`` otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site

    if not BZR.exists():
        err('unable to fetch package; bzr is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    export_opts = [
        'export',
        cache_file,
        site,
        '--format=tgz',
        '--root=' + name,
        '--revision=' + revision,
    ]

    # some environments may have issues exporting bzr sources due to
    # certificate problems; this quirk allows injecting certifi-provided
    # certificates for all bzr exports
    if 'releng.bzr.certifi' in opts._quirks:
        global CERTIFI_MISSING_WARNED

        if certifi:
            verbose('performing bzr fetch with certifi certificates')
            pkg_site = certifi.where()
            export_opts.append('-Ossl.ca_certs=' + pkg_site)
        elif not CERTIFI_MISSING_WARNED:
            CERTIFI_MISSING_WARNED = True
            warn('''\
unable to perform bzr fetch with certifi certificates

A quirk has been enabled to export bzr images using certifi
certificates; however, certifi is not installed on this system.
''')

    log('exporting sources')
    if not BZR.execute(export_opts, poll=True):
        err('unable to export module')
        return None

    return cache_file
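The certifi handling above relies on module-level setup not shown in the snippet; a sketch of the assumed guard:

# assumed module-level guard for the optional certifi dependency
try:
    import certifi
except ImportError:
    certifi = None

CERTIFI_MISSING_WARNED = False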
Example No. 16
def fetch(opts):
    """
    support fetching from cvs sources

    With provided fetch options (``RelengFetchOptions``), the fetch stage will
    be processed.

    Args:
        opts: fetch options

    Returns:
        the cache file if the fetch stage is completed; ``None`` otherwise
    """

    assert opts
    cache_file = opts.cache_file
    name = opts.name
    revision = opts.revision
    site = opts.site
    work_dir = opts.work_dir

    cache_basename = os.path.basename(cache_file)
    cache_stem, __ = interpret_stem_extension(cache_basename)

    if not CVS.exists():
        err('unable to fetch package; cvs is not installed')
        return None

    note('fetching {}...', name)
    sys.stdout.flush()

    try:
        cvsroot, module = site.rsplit(' ', 1)
    except ValueError:
        err('''\
improper cvs site defined

The provided CVS site does not define both the CVSROOT as well as the target
module to checkout. For example:

    :pserver:anonymous@cvs.example.com:/var/lib/cvsroot mymodule

 Site: {}''', site)
        return None

    log('checking out sources')
    if not CVS.execute(['-d', cvsroot, 'checkout', '-d', cache_stem,
            '-r', revision, module], cwd=work_dir):
        err('unable to checkout module')
        return None

    cvs_module_dir = os.path.join(work_dir, cache_stem)
    if not os.path.exists(cvs_module_dir):
        err('no sources available for the provided revision')
        return None

    log('caching sources')
    def cvs_filter(info):
        if info.name.endswith('CVS'):
            return None
        return info

    cache_dir = os.path.abspath(os.path.join(cache_file, os.pardir))
    if not ensure_dir_exists(cache_dir):
        return None

    with tarfile.open(cache_file, 'w:gz') as tar:
        tar.add(cvs_module_dir, arcname=cache_stem, filter=cvs_filter)

    return cache_file
Example No. 17
def extract(opts):
    """
    support extraction of an archive into a build directory

    With provided extraction options (``RelengExtractOptions``), the extraction
    stage will be processed. The archive's extension will be used in attempt to
    finding a matching tool/implementation which can be used to extract the
    contents of the file. In the event that the method of extraction cannot be
    determined, it will be assumed that the file is in fact not extractable.
    Files which are not extracted are just copied into the build directly (e.g.
    single resource files).

    Args:
        opts: the extraction options

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """

    assert opts
    cache_file = opts.cache_file
    strip_count = opts.strip_count
    work_dir = opts.work_dir

    cache_basename = os.path.basename(cache_file)
    __, cache_ext = interpret_stem_extension(cache_basename)

    is_extractable = False
    if cache_ext:
        cache_ext = cache_ext.lower()

        # if the user defines a tool override for this extension type, use
        # whatever the user wants to use (passing the file and directory to
        # extract to)
        extract_override = getattr(opts, '_extract_override', None)
        if extract_override and cache_ext in extract_override:
            is_extractable = True

            tool_cmd = extract_override[cache_ext].format(file=cache_file,
                                                          dir=work_dir)

            if not execute(tool_cmd.split(), cwd=work_dir, critical=False):
                err('unable to extract with tool override\n'
                    ' (command: {})', tool_cmd)
                return False

        # attempt to extract the (compressed) tar archive with the host's
        # tar tool; if it does not exist, we'll fallback to using python's
        # internal implementation (tarfile)
        elif cache_ext.startswith(TAR_SUPPORTED):
            is_extractable = True

            # before attempting to use an external tar command, only allow
            # using it if the `force-local` option is available whenever a
            # colon character is provided, to prevent tar from assuming the
            # path is a remote target
            needs_force_local = False
            if ':' in cache_file:
                needs_force_local = True

            has_extracted = False
            if TAR.exists() and (TAR.force_local or not needs_force_local):
                tar_args = [
                    '--extract',
                    '--file=' + cache_file,
                    '--strip-components={}'.format(strip_count),
                    '--verbose',
                ]

                if needs_force_local:
                    tar_args.append('--force-local')

                if TAR.execute(tar_args, cwd=work_dir):
                    has_extracted = True
                else:
                    warn('unable to extract archive with host tar; '
                         'will use fallback')

            if not has_extracted:
                try:

                    def tar_extract(members, strip_count):
                        for member in members:
                            # strip members from package defined count
                            if strip_count > 0:
                                np = os.path.normpath(member.name)
                                parts = np.split(os.path.sep, strip_count)
                                if len(parts) <= strip_count:
                                    continue
                                member.name = parts[-1]

                            # notify the user of the target member to extract
                            print(member.name)
                            yield member

                    with tarfile.open(cache_file, 'r') as tar:
                        tar.extractall(path=work_dir,
                                       members=tar_extract(tar, strip_count))
                except Exception as e:
                    err(
                        'unable to extract tar file\n'
                        '    {}\n'
                        ' (file: {})\n'
                        ' (target: {})', e, cache_file, work_dir)
                    return False

        # extract a zip-extension cache file using python's internal
        # implementation (zipfile)
        elif cache_ext == 'zip':
            is_extractable = True

            try:
                with ZipFile(cache_file, 'r') as zip_:
                    for member in zip_.namelist():
                        # strip members from package defined count
                        member_s = member
                        if strip_count > 0:
                            np = os.path.normpath(member_s)
                            parts = np.split(os.path.sep, strip_count)
                            if len(parts) <= strip_count:
                                continue
                            member_s = parts[-1]
                        dest = os.path.join(work_dir, member_s)

                        # notify the user of the target member to extract
                        print(member)

                        # if this is a directory entry, ensure the directory
                        # exists for the destination
                        if not os.path.basename(member):
                            ensure_dir_exists(dest)
                        else:
                            # always ensure the container directory for a file
                            # exists before attempting to extract a member into
                            # it, as not all processed zip files may process
                            # a directory entry (to be created) ahead of time
                            ensure_dir_exists(os.path.dirname(dest))

                            with zip_.open(member) as s, open(dest, 'wb') as f:
                                shutil.copyfileobj(s, f)

            except Exception as e:
                err(
                    'unable to extract zip file\n'
                    '    {}\n'
                    ' (file: {})\n'
                    ' (target: {})', e, cache_file, work_dir)
                return False

    if not is_extractable:
        debug('file not considered extractable: ' + cache_file)
        try:
            shutil.copy2(cache_file, work_dir)
        except IOError as e:
            err(
                'unable to copy over cache file\n'
                '    {}\n'
                ' (file: {})\n'
                ' (target: {})', e, cache_file, work_dir)
            return False

    return True
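Both archive paths above strip leading path components with split(os.path.sep, strip_count): a maximum split count leaves the remainder of the member path in the last element, and members shallower than the strip count are skipped. A standalone illustration (POSIX paths assumed):

import os

strip_count = 1
for name in ('pkg-1.0/src/main.c', 'pkg-1.0'):
    np = os.path.normpath(name)
    parts = np.split(os.path.sep, strip_count)
    if len(parts) <= strip_count:
        print(name, '-> (skipped)')  # top-level entry is consumed entirely
    else:
        print(name, '->', parts[-1])
# pkg-1.0/src/main.c -> src/main.c
# pkg-1.0 -> (skipped)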
Example No. 18
def stage(engine, pkg, ignore_cache, extra_opts):
    """
    handles the fetching stage for a package

    With a provided engine and package instance, the fetching stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being fetched
        ignore_cache: always attempt to ignore the cache
        extra_opts: extra options for the fetch operation (if applicable)

    Returns:
        ``True`` if the fetching stage is completed; ``False`` otherwise
    """
    assert pkg.vcs_type
    name = pkg.name
    debug('process fetch stage: ' + name)

    # packages flagged for local sources must already be checked out
    if pkg.local_srcs:
        if os.path.isdir(pkg.build_dir):
            return True

        err(
            '''\
missing local sources for internal package: {0}

The active configuration is flagged for 'local sources' mode; however, an
internal package cannot be found in the local system. Before continuing, ensure
you have checked out all internal packages on your local system (or, disable the
local sources option to use the default process).

       Package: {0}
 Expected Path: {1}''', name, pkg.build_dir)
        return False

    # if the vcs-type is archive-based, flag that hash checks are needed
    perform_file_asc_check = False
    perform_file_hash_check = False
    if pkg.vcs_type == VcsType.URL:
        perform_file_asc_check = os.path.exists(pkg.asc_file)
        perform_file_hash_check = True

    fetch_opts = RelengFetchOptions()
    replicate_package_attribs(fetch_opts, pkg)
    fetch_opts.cache_dir = pkg.cache_dir
    fetch_opts.ext = pkg.ext_modifiers
    fetch_opts.extra_opts = extra_opts
    fetch_opts.ignore_cache = ignore_cache
    fetch_opts.name = name
    fetch_opts.revision = pkg.revision
    fetch_opts.site = pkg.site
    fetch_opts.version = pkg.version
    fetch_opts._mirror = False
    fetch_opts._quirks = engine.opts.quirks
    fetch_opts._urlopen_context = engine.opts.urlopen_context

    cache_filename = os.path.basename(pkg.cache_file)
    out_dir = engine.opts.out_dir
    with generate_temp_dir(out_dir) as work_dir, \
            generate_temp_dir(out_dir) as interim_cache_dir:
        with interim_working_dir(work_dir):
            interim_cache_file = os.path.join(interim_cache_dir,
                                              cache_filename)
            fetch_opts.cache_file = interim_cache_file
            fetch_opts.work_dir = work_dir

            # check if file caching should be ignored
            #
            # In special cases, a developer may configure a project not to
            # cache a fetched source. For example, pulling from a branch of a
            # VCS source will create a cache file for that branch, which will
            # remain until manually removed. A user may wish to re-build the
            # local cache file after cleaning their project. While the
            # releng-tool framework separates fetching/extraction into two
            # parts, ignoring cached assets can be partially achieved by
            # removing any detected cache file when a project is configured
            # to ignore its cache.
            if engine.opts.devmode and pkg.devmode_ignore_cache is not None:
                fetch_opts.ignore_cache = pkg.devmode_ignore_cache

                if pkg.devmode_ignore_cache and os.path.exists(pkg.cache_file):
                    verbose('removing cache file (per configuration): ' + name)
                    if not path_remove(pkg.cache_file):
                        return False

            # remove cache file if there is a force request to ignore the cache
            elif engine.opts.force and ignore_cache:
                if os.path.exists(pkg.cache_file):
                    verbose('removing cache file (forced): ' + name)
                    if not path_remove(pkg.cache_file):
                        return False

            # force explicit ignore cache (to off) when not in development mode
            elif not engine.opts.devmode and ignore_cache is None:
                fetch_opts.ignore_cache = False

            if os.path.exists(pkg.cache_file):
                rv = None
                if perform_file_hash_check:
                    hr = verify_hashes(pkg.hash_file,
                                       pkg.cache_file,
                                       relaxed=True)

                    if hr == HashResult.VERIFIED:
                        rv = True
                    elif hr == HashResult.BAD_PATH:
                        if not perform_file_asc_check and not pkg.is_internal:
                            warn('missing hash file for package: ' + name)
                        rv = True  # no hash file to compare with; assuming ok
                    elif hr == HashResult.EMPTY:
                        if not pkg.is_internal:
                            warn('hash file for package is empty: ' + name)
                        rv = True  # empty hash file; assuming ok
                    elif hr == HashResult.MISMATCH:
                        if not path_remove(pkg.cache_file):
                            rv = False
                    elif hr in (HashResult.BAD_FORMAT, HashResult.UNSUPPORTED):
                        rv = False
                    elif hr == HashResult.MISSING_ARCHIVE:
                        if not perform_file_asc_check:
                            err(
                                '''\
missing archive hash for verification

The hash file for this package does not have an entry for the cache file to be
verified. Ensure the hash file defines an entry for the expected cache file:

    Hash File: {}
         File: {}''', pkg.hash_file, cache_filename)
                            rv = False
                    else:
                        err(
                            'invalid fetch operation (internal error; '
                            'hash-check failure: {})', hr)
                        rv = False
                else:
                    rv = True

                if rv is not False and perform_file_asc_check and \
                        os.path.exists(pkg.cache_file):
                    if GPG.validate(pkg.asc_file, pkg.cache_file):
                        rv = True
                    else:
                        if not path_remove(pkg.cache_file):
                            err(
                                '''\
failed to validate against ascii-armor

Validation of a package resource failed to verify against a provided ASCII-armor
file. Ensure that the package's public key has been registered into gpg.

 ASC File: {}
     File: {}''', pkg.asc_file, cache_filename)
                            rv = False
                        else:
                            rv = None

                if rv is not None:
                    if ignore_cache:
                        verbose('ignoring cache not supported for package: {}',
                                name)
                    return rv

            # find fetching method for the target vcs-type
            fetcher = None
            if pkg.vcs_type in engine.registry.fetch_types:

                def _(opts):
                    return engine.registry.fetch_types[pkg.vcs_type].fetch(
                        pkg.vcs_type, opts)

                fetcher = _
            elif pkg.vcs_type == VcsType.BZR:
                fetcher = fetch_bzr
            elif pkg.vcs_type == VcsType.CVS:
                fetcher = fetch_cvs
            elif pkg.vcs_type == VcsType.GIT:
                fetcher = fetch_git
            elif pkg.vcs_type == VcsType.HG:
                fetcher = fetch_mercurial
            elif pkg.vcs_type == VcsType.RSYNC:
                fetcher = fetch_rsync
            elif pkg.vcs_type == VcsType.SCP:
                fetcher = fetch_scp
            elif pkg.vcs_type == VcsType.SVN:
                fetcher = fetch_svn
            elif pkg.vcs_type == VcsType.URL:
                fetcher = fetch_url

            if not fetcher:
                err('fetch type is not implemented: {}', pkg.vcs_type)
                return False

            # if this is url-type location, attempt to search on the mirror
            # first (if configured)
            fetched = None
            if engine.opts.url_mirror and pkg.vcs_type == VcsType.URL:
                original_site = fetch_opts.site
                new_site = engine.opts.url_mirror + cache_filename
                if original_site != new_site:
                    fetch_opts._mirror = True

                    fetch_opts.site = new_site
                    fetched = fetcher(fetch_opts)
                    fetch_opts.site = original_site

                    fetch_opts._mirror = False

            # perform the fetch request (if not already fetched)
            if not fetched:
                fetched = fetcher(fetch_opts)
                if not fetched:
                    return False

            # if the fetch type has populated the package's cache directory
            # directly, we are done
            if fetched == pkg.cache_dir:
                pass
            # if the fetch type has returned a file, the file needs to be hash
            # checked and then be moved into the download cache
            elif fetched == interim_cache_file:
                if perform_file_hash_check:
                    hr = verify_hashes(pkg.hash_file, fetched)
                    if hr == HashResult.VERIFIED:
                        pass
                    elif hr == HashResult.BAD_PATH:
                        if not perform_file_asc_check and not pkg.is_internal:
                            warn('missing hash file for package: ' + name)
                    elif hr == HashResult.EMPTY:
                        if not pkg.is_internal:
                            warn('hash file for package is empty: ' + name)
                    elif hr == HashResult.MISMATCH:
                        return False
                    elif hr in (HashResult.BAD_FORMAT, HashResult.UNSUPPORTED):
                        return False
                    elif hr == HashResult.MISSING_ARCHIVE:
                        if not perform_file_asc_check:
                            err(
                                '''\
missing archive hash for verification

The hash file for this package does not have an entry for the cache file to be
verified. Ensure the hash file defines an entry for the expected cache file:

    Hash File: {}
         File: {}''', pkg.hash_file, cache_filename)
                            return False
                    else:
                        err(
                            'invalid fetch operation (internal error; '
                            'hash-check failure: {})', hr)
                        return False

                if perform_file_asc_check:
                    if not GPG.validate(pkg.asc_file, interim_cache_file):
                        err(
                            '''\
failed to validate against ascii-armor

Validation of a package resource failed to verify against a provided ASCII-armor
file. Ensure that the package's public key has been registered into gpg.

     ASC File: {}
         File: {}''', pkg.asc_file, cache_filename)
                        return False

                debug('fetch successful; moving cache file')

                # ensure the cache container/directory exists
                cache_dir = os.path.dirname(pkg.cache_file)
                if not ensure_dir_exists(cache_dir):
                    return False

                try:
                    shutil.move(interim_cache_file, pkg.cache_file)
                except shutil.Error:
                    err(
                        'invalid fetch operation (internal error; fetch mode '
                        '"{}" has provided a missing cache file)',
                        pkg.vcs_type)
                    return False
            else:
                err(
                    'invalid fetch operation (internal error; fetch mode "{}" '
                    'has returned an unsupported value)', pkg.vcs_type)
                return False

    return True
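Stripped of the hash and signature branches, the core pattern in this stage is: fetch into a temporary location, verify, and only then move the artifact into the shared cache. A minimal hedged sketch of that pattern; fetch_one and verify are hypothetical callables standing in for the stage logic above.

import os
import shutil
import tempfile

def fetch_into_cache(fetch_one, verify, cache_file):
    # download into an interim directory so a failed or unverified fetch
    # never pollutes the shared cache
    with tempfile.TemporaryDirectory() as tmp:
        interim = os.path.join(tmp, os.path.basename(cache_file))
        if not fetch_one(interim) or not verify(interim):
            return False

        # publish the artifact only after verification succeeds
        cache_dir = os.path.dirname(cache_file) or '.'
        os.makedirs(cache_dir, exist_ok=True)
        shutil.move(interim, cache_file)
    return True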