Example #1
    def run(self, result=None):
        """
        run the test

        Run the test, collecting the result into the TestResult object passed as
        result. See `unittest.TestCase.run()` for more details.

        Args:
            result (optional): the test result to populate
        """

        with generate_temp_dir() as repo_dir, interim_working_dir(repo_dir):
            self.repo_dir = repo_dir
            self.prepare_repo_dir(repo_dir)

            with prepare_testenv(template=self.tool_template()) as engine:
                opts = engine.opts

                opts.gbl_action = self.prepare_global_action()

                self.cache_dir = os.path.join(opts.cache_dir, PKG_NAME)
                self.defconfig = os.path.join(opts.root_dir, PKG_DEFINITION)
                self.engine = engine

                self.defconfig_add('SITE', self.repo_dir)
                self.prepare_defconfig(self.defconfig)

                super(TestSiteToolBase, self).run(result)
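A minimal sketch of how a concrete test case could hook into this base class. The subclass and its template/defconfig details are hypothetical; the overridden hooks simply mirror the calls made by `run()` above:

class TestSiteToolExample(TestSiteToolBase):
    def tool_template(self):
        # hypothetical template project copied into the working directory
        return 'example-tool'

    def prepare_global_action(self):
        # no explicit global action for these tests
        return None

    def prepare_defconfig(self, defconfig):
        # hypothetical hook: register extra package options before the run
        self.defconfig_add('VERSION', DEFAULT_BRANCH)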
Example #2
    def test_tool_git_submodules_default(self):
        self.defconfig_add('VERSION', DEFAULT_BRANCH)

        with generate_temp_dir() as repo2:
            # prepare additional mock repository directories
            repo1 = self.repo_dir
            self.prepare_repo_dir(repo2)

            # dummy file on repo1
            touch(os.path.join(repo1, 'file1'))
            self._create_commit('add file', repo=repo1, add=True)

            # dummy file on repo2
            touch(os.path.join(repo2, 'file2'))
            self._create_commit('add file', repo=repo2, add=True)

            # add repo2 as a submodule of repo1
            self._git_repo('submodule', 'add', repo2, 'repo2', repo=repo1)
            self._create_commit('add module', repo=repo1, add=True)

            # extract package but not submodules (by default)
            self.engine.opts.gbl_action = GlobalAction.EXTRACT
            rv = self.engine.run()
            self.assertTrue(rv)

            # verify expected content from main package; not submodules
            out_dir = os.path.join(self.engine.opts.build_dir,
                                   'test-' + DEFAULT_BRANCH)
            repo1_file = os.path.join(out_dir, 'file1')
            repo2_file = os.path.join(out_dir, 'repo2', 'file2')
            self.assertTrue(os.path.exists(repo1_file))
            self.assertFalse(os.path.exists(repo2_file))
Example #3
@contextmanager
def prepare_workdir():
    """
    prepare a working directory for a test

    This utility method is used to provide a test a directory to store
    output files. This method will ensure the container directory is emptied
    before returning.

    Yields:
        the container directory
    """

    with generate_temp_dir() as work_dir:
        yield work_dir
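A minimal usage sketch, assuming `prepare_workdir` is exposed as a `contextlib` context manager (as the `yield` and the decorator imply); the output file name is illustrative:

with prepare_workdir() as work_dir:
    # hypothetical output artifact written into the emptied directory
    touch(os.path.join(work_dir, 'results.log'))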
Example #4
@contextmanager
def prepare_testenv(config=None, template=None, args=None):
    """
    prepare an engine-ready environment for a test

    This utility method is used to provide a `RelengEngine` instance ready for
    execution on an interim working directory.

    Args:
        config (optional): dictionary of options to mock for arguments
        template (optional): the folder holding a template project to copy into
                              the prepared working directory
        args (optional): additional arguments to add to the "forwarded options"

    Yields:
        the engine
    """
    class MockArgs(object):
        def __getattr__(self, name):
            # __getattr__ is only invoked for attributes that were never set,
            # so any unconfigured option resolves to None
            return self.__dict__.get(name)

    if config is None:
        config = {}

    with generate_temp_dir() as work_dir, interim_working_dir(work_dir):
        # default the root directory to the temporary directory; otherwise,
        # base all generated working content off the temporary directory
        if 'root_dir' not in config:
            config['root_dir'] = work_dir
        else:
            if 'cache_dir' not in config:
                config['cache_dir'] = os.path.join(work_dir, 'cache')
            if 'dl_dir' not in config:
                config['dl_dir'] = os.path.join(work_dir, 'dl')
            if 'out_dir' not in config:
                config['out_dir'] = os.path.join(work_dir, 'out')

        if template:
            copy_template(template, work_dir)

        # build arguments instance
        test_args = MockArgs()
        for k, v in config.items():
            setattr(test_args, k, v)

        # prepare engine options and build an engine instance
        opts = RelengEngineOptions(args=test_args, forward_args=args)
        engine = RelengEngine(opts)

        yield engine
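A usage sketch under the same context-manager assumption; the configuration key shown is illustrative of how entries are forwarded onto the mocked arguments instance:

with prepare_testenv(config={'verbose': True}) as engine:
    # the engine is ready to run against the interim working directory
    rv = engine.run()
    assert rv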
Example #5
def stage(engine, pkg, ignore_cache, extra_opts):
    """
    handles the fetching stage for a package

    With a provided engine and package instance, the fetching stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being fetched
        ignore_cache: always attempt to ignore the cache
        extra_opts: extra options for the fetch operation (if applicable)

    Returns:
        ``True`` if the fetching stage is completed; ``False`` otherwise
    """
    assert pkg.vcs_type
    name = pkg.name
    debug('process fetch stage: ' + name)

    # packages flagged for local sources are required to already be checked out
    if pkg.local_srcs:
        if os.path.isdir(pkg.build_dir):
            return True

        err(
            '''\
missing local sources for internal package: {0}

The active configuration is flagged for 'local sources' mode; however, an
internal package cannot be found in the local system. Before continuing, ensure
you have checked out all internal packages on your local system (or, disable the
local sources option to use the default process).

       Package: {0}
 Expected Path: {1}''', name, pkg.build_dir)
        return False

    # if the vcs-type is archive-based, flag that hash checks are needed
    perform_file_asc_check = False
    perform_file_hash_check = False
    if pkg.vcs_type == VcsType.URL:
        perform_file_asc_check = os.path.exists(pkg.asc_file)
        perform_file_hash_check = True

    fetch_opts = RelengFetchOptions()
    replicate_package_attribs(fetch_opts, pkg)
    fetch_opts.cache_dir = pkg.cache_dir
    fetch_opts.ext = pkg.ext_modifiers
    fetch_opts.extra_opts = extra_opts
    fetch_opts.ignore_cache = ignore_cache
    fetch_opts.name = name
    fetch_opts.revision = pkg.revision
    fetch_opts.site = pkg.site
    fetch_opts.version = pkg.version
    fetch_opts._mirror = False
    fetch_opts._quirks = engine.opts.quirks
    fetch_opts._urlopen_context = engine.opts.urlopen_context

    cache_filename = os.path.basename(pkg.cache_file)
    out_dir = engine.opts.out_dir
    with generate_temp_dir(out_dir) as work_dir, \
            generate_temp_dir(out_dir) as interim_cache_dir:
        with interim_working_dir(work_dir):
            interim_cache_file = os.path.join(interim_cache_dir,
                                              cache_filename)
            fetch_opts.cache_file = interim_cache_file
            fetch_opts.work_dir = work_dir

            # check if file caching should be ignored
            #
            # In special cases, a developer may configure a project so that a
            # fetched source is not cached. For example, pulling from a branch
            # of a VCS source creates a cache file for that branch which
            # remains until it is manually removed. A user may wish to rebuild
            # the local cache file after cleaning their project. While the
            # releng-tool framework separates fetching/extraction into two
            # parts, ignoring cached assets can be partially achieved by
            # removing any detected cache file when a project is configured to
            # ignore its cache file.
            if engine.opts.devmode and pkg.devmode_ignore_cache is not None:
                fetch_opts.ignore_cache = pkg.devmode_ignore_cache

                if pkg.devmode_ignore_cache and os.path.exists(pkg.cache_file):
                    verbose('removing cache file (per configuration): ' + name)
                    if not path_remove(pkg.cache_file):
                        return False

            # remove cache file if there is a force request to ignore the cache
            elif engine.opts.force and ignore_cache:
                if os.path.exists(pkg.cache_file):
                    verbose('removing cache file (forced): ' + name)
                    if not path_remove(pkg.cache_file):
                        return False

            # force explicit ignore cache (to off) when not in development mode
            elif not engine.opts.devmode and ignore_cache is None:
                fetch_opts.ignore_cache = False

            if os.path.exists(pkg.cache_file):
                rv = None
                if perform_file_hash_check:
                    hr = verify_hashes(pkg.hash_file,
                                       pkg.cache_file,
                                       relaxed=True)

                    if hr == HashResult.VERIFIED:
                        rv = True
                    elif hr == HashResult.BAD_PATH:
                        if not perform_file_asc_check and not pkg.is_internal:
                            warn('missing hash file for package: ' + name)
                        rv = True  # no hash file to compare with; assuming ok
                    elif hr == HashResult.EMPTY:
                        if not pkg.is_internal:
                            warn('hash file for package is empty: ' + name)
                        rv = True  # empty hash file; assuming ok
                    elif hr == HashResult.MISMATCH:
                        if not path_remove(pkg.cache_file):
                            rv = False
                    elif hr in (HashResult.BAD_FORMAT, HashResult.UNSUPPORTED):
                        rv = False
                    elif hr == HashResult.MISSING_ARCHIVE:
                        if not perform_file_asc_check:
                            err(
                                '''\
missing archive hash for verification

The hash file for this package does not have an entry for the cache file to be
verified. Ensure the hash file defines an entry for the expected cache file:

    Hash File: {}
         File: {}''', pkg.hash_file, cache_filename)
                            rv = False
                    else:
                        err(
                            'invalid fetch operation (internal error; '
                            'hash-check failure: {})', hr)
                        rv = False
                else:
                    rv = True

                if rv is not False and perform_file_asc_check and \
                        os.path.exists(pkg.cache_file):
                    if GPG.validate(pkg.asc_file, pkg.cache_file):
                        rv = True
                    else:
                        if not path_remove(pkg.cache_file):
                            err(
                                '''\
failed to validate against ascii-armor

Validation of a package resource failed to verify against a provided ASCII-armor
file. Ensure that the package's public key has been registered into gpg.

 ASC File: {}
     File: {}''', pkg.asc_file, cache_filename)
                            rv = False
                        else:
                            rv = None

                if rv is not None:
                    if ignore_cache:
                        verbose('ignoring cache not supported for package: {}',
                                name)
                    return rv

            # find fetching method for the target vcs-type
            fetcher = None
            if pkg.vcs_type in engine.registry.fetch_types:

                def _(opts):
                    return engine.registry.fetch_types[pkg.vcs_type].fetch(
                        pkg.vcs_type, opts)

                fetcher = _
            elif pkg.vcs_type == VcsType.BZR:
                fetcher = fetch_bzr
            elif pkg.vcs_type == VcsType.CVS:
                fetcher = fetch_cvs
            elif pkg.vcs_type == VcsType.GIT:
                fetcher = fetch_git
            elif pkg.vcs_type == VcsType.HG:
                fetcher = fetch_mercurial
            elif pkg.vcs_type == VcsType.RSYNC:
                fetcher = fetch_rsync
            elif pkg.vcs_type == VcsType.SCP:
                fetcher = fetch_scp
            elif pkg.vcs_type == VcsType.SVN:
                fetcher = fetch_svn
            elif pkg.vcs_type == VcsType.URL:
                fetcher = fetch_url

            if not fetcher:
                err('fetch type is not implemented: {}', pkg.vcs_type)
                return False

            # if this is url-type location, attempt to search on the mirror
            # first (if configured)
            fetched = None
            if engine.opts.url_mirror and pkg.vcs_type == VcsType.URL:
                original_site = fetch_opts.site
                new_site = engine.opts.url_mirror + cache_filename
                if original_site != new_site:
                    fetch_opts._mirror = True

                    fetch_opts.site = new_site
                    fetched = fetcher(fetch_opts)
                    fetch_opts.site = original_site

                    fetch_opts._mirror = False

            # perform the fetch request (if not already fetched)
            if not fetched:
                fetched = fetcher(fetch_opts)
                if not fetched:
                    return False

            # if the fetch type has populated the package's cache directory
            # directly, we are done
            if fetched == pkg.cache_dir:
                pass
            # if the fetch type has returned a file, the file needs to be hash
            # checked and then be moved into the download cache
            elif fetched == interim_cache_file:
                if perform_file_hash_check:
                    hr = verify_hashes(pkg.hash_file, fetched)
                    if hr == HashResult.VERIFIED:
                        pass
                    elif hr == HashResult.BAD_PATH:
                        if not perform_file_asc_check and not pkg.is_internal:
                            warn('missing hash file for package: ' + name)
                    elif hr == HashResult.EMPTY:
                        if not pkg.is_internal:
                            warn('hash file for package is empty: ' + name)
                    elif hr == HashResult.MISMATCH:
                        return False
                    elif hr in (HashResult.BAD_FORMAT, HashResult.UNSUPPORTED):
                        return False
                    elif hr == HashResult.MISSING_ARCHIVE:
                        if not perform_file_asc_check:
                            err(
                                '''\
missing archive hash for verification

The hash file for this package does not have an entry for the cache file to be
verified. Ensure the hash file defines an entry for the expected cache file:

    Hash File: {}
         File: {}''', pkg.hash_file, cache_filename)
                            return False
                    else:
                        err(
                            'invalid fetch operation (internal error; '
                            'hash-check failure: {})', hr)
                        return False

                if perform_file_asc_check:
                    if not GPG.validate(pkg.asc_file, interim_cache_file):
                        err(
                            '''\
failed to validate against ascii-armor

Validation of a package resource failed to verify against a provided ASCII-armor
file. Ensure that the package's public key has been registered into gpg.

     ASC File: {}
         File: {}''', pkg.asc_file, cache_filename)
                        return False

                debug('fetch successful; moving cache file')

                # ensure the cache container/directory exists
                cache_dir = os.path.dirname(pkg.cache_file)
                if not ensure_dir_exists(cache_dir):
                    return False

                try:
                    shutil.move(interim_cache_file, pkg.cache_file)
                except shutil.Error:
                    err(
                        'invalid fetch operation (internal error; fetch mode '
                        '"{}" has provided a missing cache file)',
                        pkg.vcs_type)
                    return False
            else:
                err(
                    'invalid fetch operation (internal error; fetch mode "{}" '
                    'has returned an unsupported value)', pkg.vcs_type)
                return False

    return True
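Based on the dispatch above, a custom fetch type registered in `engine.registry.fetch_types` is expected to expose a `fetch(vcs_type, opts)` method that returns either the populated cache directory or the interim cache file (a falsy return signals failure). A hedged sketch of such a handler (the class and its behavior are hypothetical):

class LocalDirFetcher:
    # hypothetical fetch type: treat the configured site as a local
    # directory and replicate it into the package's cache directory
    def fetch(self, vcs_type, opts):
        if os.path.exists(opts.cache_dir):
            if not path_remove(opts.cache_dir):
                return None
        shutil.copytree(opts.site, opts.cache_dir)
        return opts.cache_dir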
Example #6
def stage(engine, pkg):
    """
    handles the extraction stage for a package

    With a provided engine and package instance, the extraction stage will be
    processed.

    Args:
        engine: the engine
        pkg: the package being extracted

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """

    # packages flagged for local sources do not have an extraction stage
    if pkg.local_srcs:
        return True

    # skip packages flagged not to extract
    if pkg.no_extraction:
        return True

    note('extracting {}...', pkg.name)
    sys.stdout.flush()

    extract_opts = RelengExtractOptions()
    replicate_package_attribs(extract_opts, pkg)
    extract_opts.cache_dir = pkg.cache_dir
    extract_opts.cache_file = pkg.cache_file
    extract_opts.ext = pkg.ext_modifiers
    extract_opts.name = pkg.name
    extract_opts.revision = pkg.revision
    extract_opts.strip_count = pkg.strip_count
    extract_opts.version = pkg.version
    extract_opts._extract_override = engine.opts.extract_override
    extract_opts._quirks = engine.opts.quirks

    if os.path.exists(pkg.build_dir):
        warn('build directory exists before extraction; removing')

        if not path_remove(pkg.build_dir):
            err('unable to cleanup build directory: ' + pkg.build_dir)
            return False

    # prepare and step into a newly created working directory
    #
    # An extractor will take the contents of an archive, cache directory or
    # other fetched content and populate the "work" directory. On successful
    # extraction (or moving resources), the work directory will be moved to the
    # package's respective build directory.
    out_dir = engine.opts.out_dir
    with generate_temp_dir(out_dir) as work_dir:
        with interim_working_dir(work_dir):
            extract_opts.work_dir = work_dir

            extracter = None
            hash_exclude = []
            extract_types = engine.registry.extract_types
            if pkg.extract_type and pkg.extract_type in extract_types:
                def _(opts):
                    return extract_types[pkg.extract_type].extract(
                        pkg.extract_type, opts)
                extracter = _
            elif pkg.vcs_type in extract_types:
                extracter = extract_types[pkg.vcs_type].extract
            elif pkg.vcs_type == VcsType.GIT:
                extracter = extract_git
            elif pkg.vcs_type == VcsType.HG:
                extracter = extract_mercurial
            elif os.path.isfile(pkg.cache_file):
                cache_basename = os.path.basename(pkg.cache_file)
                hash_exclude.append(cache_basename)
                extracter = extract_archive

            if not extracter:
                err('extract type is not implemented: {}', pkg.vcs_type)
                return False

            # perform the extract request
            extracted = extracter(extract_opts)
            if not extracted:
                return False

            result = verify_hashes(pkg.hash_file, work_dir, hash_exclude)
            if result == HashResult.VERIFIED:
                pass
            elif result == HashResult.BAD_PATH:
                if not pkg.is_internal:
                    warn('missing hash file for package: ' + pkg.name)
            elif result == HashResult.EMPTY:
                if not pkg.is_internal:
                    verbose('hash file for package is empty: ' + pkg.name)
            elif result in (HashResult.BAD_FORMAT, HashResult.MISMATCH,
                    HashResult.MISSING_LISTED, HashResult.UNSUPPORTED):
                return False
            else:
                err('invalid extract operation (internal error; '
                    'hash-check failure: {})', result)
                return False

        debug('extraction successful; moving sources into package output '
            'directory: ' + pkg.build_dir)
        shutil.move(work_dir, pkg.build_dir)

    return True
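The extraction dispatch mirrors the fetch stage: a registered extract type is expected to expose an `extract(extract_type, opts)` method that populates `opts.work_dir` and reports success. A hedged sketch (the class and its behavior are hypothetical):

class PassthroughExtracter:
    # hypothetical extract type: copy an already-populated cache
    # directory into the working directory as-is
    def extract(self, extract_type, opts):
        for entry in os.listdir(opts.cache_dir):
            src = os.path.join(opts.cache_dir, entry)
            dst = os.path.join(opts.work_dir, entry)
            if os.path.isdir(src):
                shutil.copytree(src, dst)
            else:
                shutil.copy2(src, dst)
        return True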
Example #7
    def test_tool_git_submodules_branch_revision(self):
        self.defconfig_add('GIT_SUBMODULES', True)
        self.defconfig_add('VERSION', DEFAULT_BRANCH)

        with generate_temp_dir() as repo2:
            # prepare additional mock repository directories
            repo1 = self.repo_dir
            self.prepare_repo_dir(repo2)

            # dummy file on repo1
            touch(os.path.join(repo1, 'file1'))
            self._create_commit('add file', repo=repo1, add=True)

            # dummy files on repo2 for two branches
            CUSTOM_BRANCH = 'custom'

            touch(os.path.join(repo2, 'file2'))
            self._create_commit('add file', repo=repo2, add=True)

            self._git_repo('checkout', '-b', CUSTOM_BRANCH, repo=repo2)

            touch(os.path.join(repo2, 'file3'))
            self._create_commit('add file', repo=repo2, add=True)

            self._git_repo('checkout', DEFAULT_BRANCH, repo=repo2)

            # add repo2 as a submodule of repo1
            self._git_repo('submodule', 'add', repo2, 'repo2', repo=repo1)
            self._create_commit('add module', repo=repo1, add=True)

            # extract package, including submodules (GIT_SUBMODULES is set)
            self.engine.opts.gbl_action = GlobalAction.EXTRACT
            rv = self.engine.run()
            self.assertTrue(rv)

            # verify expected content from main package and submodules
            out_dir = os.path.join(self.engine.opts.build_dir,
                                   'test-' + DEFAULT_BRANCH)
            repo1_file = os.path.join(out_dir, 'file1')
            repo2_file = os.path.join(out_dir, 'repo2', 'file2')
            self.assertTrue(os.path.exists(repo1_file))
            self.assertTrue(os.path.exists(repo2_file))

            # cleanup
            self.cleanup_outdir()

            # adjust submodule to target the custom branch
            self._git_repo('config',
                           '-f',
                           '.gitmodules',
                           'submodule.repo2.branch',
                           CUSTOM_BRANCH,
                           repo=repo1)

            # force a fetch (which should update the cache)
            self.engine.opts.gbl_action = GlobalAction.FETCH
            rv = self.engine.run()
            self.assertTrue(rv)

            # re-extract package and submodules
            self.engine.opts.gbl_action = GlobalAction.EXTRACT
            rv = self.engine.run()
            self.assertTrue(rv)

            # verify expected content from main package and submodules
            repo3_file = os.path.join(out_dir, 'repo2', 'file3')
            self.assertTrue(os.path.exists(repo3_file))