Example #1
    def find_paths_and_metadata(self, verbose, detect_paths):
        paths, tags, flavors = set(), set(), set()
        changed_files = self.vcs.files_changed
        if changed_files and detect_paths:
            if verbose:
                print("Pushing tests based on modifications to the "
                      "following files:\n\t%s" % "\n\t".join(changed_files))

            from mozbuild.frontend.reader import (
                BuildReader,
                EmptyConfig,
            )

            config = EmptyConfig(self.topsrcdir)
            reader = BuildReader(config)
            files_info = reader.files_info(changed_files)

            for path, info in files_info.items():
                paths |= info.test_files
                tags |= info.test_tags
                flavors |= info.test_flavors

            if verbose:
                if paths:
                    print("Pushing tests based on the following patterns:\n\t%s" %
                          "\n\t".join(paths))
                if tags:
                    print("Pushing tests based on the following tags:\n\t%s" %
                          "\n\t".join(tags))

        return {
            'paths': paths,
            'tags': tags,
            'flavors': flavors,
        }
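Most of the examples on this page share one pattern: build an EmptyConfig (or a full ConfigEnvironment) for the source tree, wrap it in a BuildReader, and query per-file metadata with files_info(). Below is a minimal standalone sketch of that pattern distilled from Example #1; the tree root and file list are placeholders and would need to point at a real mozilla-central checkout for the script to actually run.

import os

from mozbuild.frontend.reader import BuildReader, EmptyConfig

# Placeholder source tree root; in the examples it comes from self.topsrcdir
# or MozbuildObject.from_environment().
topsrcdir = os.path.abspath("/path/to/mozilla-central")

# An EmptyConfig is enough for reading metadata; no configured objdir is needed.
config = EmptyConfig(topsrcdir)
reader = BuildReader(config)

# files_info() maps each requested path to the Files() metadata covering it.
changed_files = ["dom/base/moz.build"]  # placeholder path
files_info = reader.files_info(changed_files)

paths, tags, flavors = set(), set(), set()
for path, info in files_info.items():
    paths |= info.test_files
    tags |= info.test_tags
    flavors |= info.test_flavors

print(sorted(paths), sorted(tags), sorted(flavors))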
Example #2
def gen_test_backend():
    build_obj = MozbuildObject.from_environment()
    try:
        config = build_obj.config_environment
    except BuildEnvironmentNotFoundException:
        # Create a stub config.status file, since the TestManifest backend needs
        # to be re-created if configure runs. If the file doesn't exist,
        # mozbuild continually thinks the TestManifest backend is out of date
        # and tries to regenerate it.

        if not os.path.isdir(build_obj.topobjdir):
            os.makedirs(build_obj.topobjdir)

        config_status = mozpath.join(build_obj.topobjdir, "config.status")
        open(config_status, "w").close()

        print("No build detected, test metadata may be incomplete.")

        # If 'JS_STANDALONE' is set, tests that don't require an objdir won't
        # be picked up due to bug 1345209.
        substs = EmptyConfig.default_substs
        if "JS_STANDALONE" in substs:
            del substs["JS_STANDALONE"]

        config = EmptyConfig(build_obj.topsrcdir, substs)
        config.topobjdir = build_obj.topobjdir

    reader = BuildReader(config)
    emitter = TreeMetadataEmitter(config)
    backend = TestManifestBackend(config)

    context = reader.read_topsrcdir()
    data = emitter.emit(context, emitfn=emitter._process_test_manifests)
    backend.consume(data)
Example #3
    def find_paths_and_tags(self, verbose):
        paths, tags = set(), set()
        changed_files = self.find_changed_files()
        if changed_files:
            if verbose:
                print("Pushing tests based on modifications to the "
                      "following files:\n\t%s" % "\n\t".join(changed_files))

            from mozbuild.frontend.reader import (
                BuildReader,
                EmptyConfig,
            )

            config = EmptyConfig(self.topsrcdir)
            reader = BuildReader(config)
            files_info = reader.files_info(changed_files)

            for path, info in files_info.items():
                paths |= info.test_files
                tags |= info.test_tags

            if verbose:
                if paths:
                    print("Pushing tests based on the following patterns:\n\t%s" %
                          "\n\t".join(paths))
                if tags:
                    print("Pushing tests based on the following tags:\n\t%s" %
                          "\n\t".join(tags))
        return paths, tags
Example #4
    def build_docs(self, format=None, outdir=None):
        self._activate_virtualenv()
        self.virtualenv_manager.install_pip_package('mdn-sphinx-theme==0.4')

        from moztreedocs import SphinxManager

        if outdir == '<DEFAULT>':
            outdir = os.path.join(self.topobjdir, 'docs')

        manager = SphinxManager(self.topsrcdir,
                                os.path.join(self.topsrcdir, 'tools', 'docs'),
                                outdir)

        # We don't care about GYP projects, so don't process them. This makes
        # scanning faster and may even prevent an exception.
        def remove_gyp_dirs(context):
            context['GYP_DIRS'][:] = []

        reader = BuildReader(self.config_environment,
                             sandbox_post_eval_cb=remove_gyp_dirs)

        for path, name, key, value in reader.find_sphinx_variables():
            reldir = os.path.dirname(path)

            if name == 'SPHINX_TREES':
                assert key
                manager.add_tree(os.path.join(reldir, value),
                                 os.path.join(reldir, key))

            if name == 'SPHINX_PYTHON_PACKAGE_DIRS':
                manager.add_python_package_dir(os.path.join(reldir, value))

        return manager.generate_docs(format)
Example #5
    def build_docs(self, format=None, outdir=None):
        self._activate_virtualenv()
        self.virtualenv_manager.install_pip_package('mdn-sphinx-theme==0.4')

        from moztreedocs import SphinxManager

        if outdir == '<DEFAULT>':
            outdir = os.path.join(self.topobjdir, 'docs')

        manager = SphinxManager(self.topsrcdir, os.path.join(self.topsrcdir,
            'tools', 'docs'), outdir)

        # We don't care about GYP projects, so don't process them. This makes
        # scanning faster and may even prevent an exception.
        def remove_gyp_dirs(context):
            context['GYP_DIRS'][:] = []

        reader = BuildReader(self.config_environment,
            sandbox_post_eval_cb=remove_gyp_dirs)

        for context in reader.walk_topsrcdir():
            for dest_dir, source_dir in context['SPHINX_TREES'].items():
                manager.add_tree(os.path.join(context.relsrcdir,
                    source_dir), dest_dir)

            for entry in context['SPHINX_PYTHON_PACKAGE_DIRS']:
                manager.add_python_package_dir(os.path.join(context.relsrcdir,
                    entry))

        return manager.generate_docs(format)
Example #6
    def build_docs(self, format=None, outdir=None):
        self._activate_virtualenv()
        self.virtualenv_manager.install_pip_package('mdn-sphinx-theme==0.4')

        from moztreedocs import SphinxManager

        if outdir == '<DEFAULT>':
            outdir = os.path.join(self.topobjdir, 'docs')

        manager = SphinxManager(self.topsrcdir, os.path.join(self.topsrcdir,
            'tools', 'docs'), outdir)

        # We don't care about GYP projects, so don't process them. This makes
        # scanning faster and may even prevent an exception.
        def remove_gyp_dirs(context):
            context['GYP_DIRS'][:] = []

        reader = BuildReader(self.config_environment,
            sandbox_post_eval_cb=remove_gyp_dirs)

        for path, name, key, value in reader.find_sphinx_variables():
            reldir = os.path.dirname(path)

            if name == 'SPHINX_TREES':
                assert key
                manager.add_tree(os.path.join(reldir, value),
                        os.path.join(reldir, key))

            if name == 'SPHINX_PYTHON_PACKAGE_DIRS':
                manager.add_python_package_dir(os.path.join(reldir, value))

        return manager.generate_docs(format)
Example #7
    def build_docs(self, format=None, outdir=None):
        self._activate_virtualenv()
        self.virtualenv_manager.install_pip_package('mdn-sphinx-theme==0.4')

        from moztreedocs import SphinxManager

        if outdir == '<DEFAULT>':
            outdir = os.path.join(self.topobjdir, 'docs')

        manager = SphinxManager(self.topsrcdir,
                                os.path.join(self.topsrcdir, 'tools', 'docs'),
                                outdir)

        # We don't care about GYP projects, so don't process them. This makes
        # scanning faster and may even prevent an exception.
        def remove_gyp_dirs(sandbox):
            sandbox['GYP_DIRS'][:] = []

        reader = BuildReader(self.config_environment,
                             sandbox_post_eval_cb=remove_gyp_dirs)

        for sandbox in reader.walk_topsrcdir():
            for dest_dir, source_dir in sandbox['SPHINX_TREES'].items():
                manager.add_tree(
                    os.path.join(sandbox['RELATIVEDIR'], source_dir), dest_dir)

            for entry in sandbox['SPHINX_PYTHON_PACKAGE_DIRS']:
                manager.add_python_package_dir(
                    os.path.join(sandbox['RELATIVEDIR'], entry))

        return manager.generate_docs(format)
Example #8
    def test_orphan_file_patterns(self):
        if sys.platform == 'win32':
            raise unittest.SkipTest('failing on windows builds')

        mb = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)

        try:
            config = mb.config_environment
        except Exception as e:
            if str(e) == 'config.status not available. Run configure.':
                raise unittest.SkipTest('failing without config.status')
            raise

        if config.substs['MOZ_BUILD_APP'] == 'js':
            raise unittest.SkipTest('failing in Spidermonkey builds')

        reader = BuildReader(config)
        all_paths = self._mozbuilds(reader)
        _, contexts = reader.read_relevant_mozbuilds(all_paths)

        finder = FileFinder(config.topsrcdir, ignore=['obj*'])

        def pattern_exists(pat):
            return [p for p in finder.find(pat)] != []

        for ctx in contexts:
            if not isinstance(ctx, Files):
                continue
            relsrcdir = ctx.relsrcdir
            for p in ctx.patterns:
                if not pattern_exists(os.path.join(relsrcdir, p)):
                    self.fail("The pattern '%s' in a Files() entry in "
                              "'%s' corresponds to no files in the tree.\n"
                              "Please update this entry." %
                              (p, ctx.main_path))
Example #9
    def read_build_config(self):
        """Read the active build config and add docs to this instance."""

        # Reading the Sphinx variables doesn't require a full build context.
        # Only define the parts we need.
        class fakeconfig(object):
            def __init__(self, topsrcdir):
                self.topsrcdir = topsrcdir

        config = fakeconfig(self._topsrcdir)
        reader = BuildReader(config)

        for path, name, key, value in reader.find_sphinx_variables():
            reldir = os.path.dirname(path)

            if name == 'SPHINX_TREES':
                assert key
                if key.startswith('/'):
                    key = key[1:]
                else:
                    key = os.path.join(reldir, key)
                self.add_tree(os.path.join(reldir, value), key)

            if name == 'SPHINX_PYTHON_PACKAGE_DIRS':
                self.add_python_package_dir(os.path.join(reldir, value))
Example #11
    def _emit(self, name, env=None):
        if not env:
            env = self._get_environment(name)

        reader = BuildReader(env)
        emitter = TreeMetadataEmitter(env)

        return env, emitter.emit(reader.read_topsrcdir())
Example #12
    def file_info_schedules(self, paths):
        """Show what is scheduled by the given files.

        Given a requested set of files (which can be specified using
        wildcards), print the total set of scheduled components.
        """
        from mozbuild.frontend.reader import EmptyConfig, BuildReader
        config = EmptyConfig(TOPSRCDIR)
        reader = BuildReader(config)
        schedules = set()
        for p, m in reader.files_info(paths).items():
            schedules |= set(m['SCHEDULES'].components)

        print(", ".join(schedules))
Example #14
    def test_filesystem_traversal_reading(self):
        """Reading moz.build according to filesystem traversal works.

        We attempt to read every known moz.build file via filesystem traversal.

        If this test fails, it means that metadata extraction will fail.
        """
        mb = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
        config = mb.config_environment
        reader = BuildReader(config)
        all_paths = self._mozbuilds(reader)
        paths, contexts = reader.read_relevant_mozbuilds(all_paths)
        self.assertEqual(set(paths), all_paths)
        self.assertGreaterEqual(len(contexts), len(paths))
Example #17
    def test_filesystem_traversal_no_config(self):
        """Reading moz.build files via filesystem traversal mode with no build config.

        This is similar to the above test except no build config is applied.
        This will likely fail in more scenarios than the above test because a
        lot of moz.build files assume certain variables are present.
        """
        here = os.path.abspath(os.path.dirname(__file__))
        root = os.path.normpath(os.path.join(here, '..', '..'))
        config = EmptyConfig(root)
        reader = BuildReader(config)
        all_paths = self._mozbuilds(reader)
        paths, contexts = reader.read_relevant_mozbuilds(all_paths)
        self.assertEqual(set(paths.keys()), all_paths)
        self.assertGreaterEqual(len(contexts), len(paths))
Example #18
    def reader(self, name, enable_tests=False):
        config = self.config(name)

        if enable_tests:
            config.substs['ENABLE_TESTS'] = '1'

        return BuildReader(config)
Example #19
    def reader(self, name, enable_tests=False, **kwargs):
        extra = {}
        if enable_tests:
            extra['ENABLE_TESTS'] = '1'
        config = self.config(name, extra_substs=extra)

        return BuildReader(config, **kwargs)
Example #20
    def reader(self, name):
        config = MockConfig(os.path.join(data_path, name), extra_substs=dict(
            ENABLE_TESTS='1',
            BIN_SUFFIX='.prog',
        ))

        return BuildReader(config)
Example #21
def gen_test_backend():
    build_obj = MozbuildObject.from_environment()
    try:
        config = build_obj.config_environment
    except BuildEnvironmentNotFoundException:
        print("No build detected, test metadata may be incomplete.")
        config = EmptyConfig(build_obj.topsrcdir)
        config.topobjdir = build_obj.topobjdir

    reader = BuildReader(config)
    emitter = TreeMetadataEmitter(config)
    backend = TestManifestBackend(config)

    context = reader.read_topsrcdir()
    data = emitter.emit(context, emitfn=emitter._process_test_manifests)
    backend.consume(data)
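A second recurring pattern (Examples #2, #21 and #41) drives a full-tree read through an emitter and a backend instead of querying individual files. The sketch below assumes a configured objdir; the import paths for MozbuildObject, TreeMetadataEmitter and TestManifestBackend are not shown in the snippets on this page and are an assumption based on the usual mozbuild layout.

# Assumed import paths; only BuildReader's module appears in the examples.
from mozbuild.base import MozbuildObject
from mozbuild.backend.test_manifest import TestManifestBackend
from mozbuild.frontend.emitter import TreeMetadataEmitter
from mozbuild.frontend.reader import BuildReader

build_obj = MozbuildObject.from_environment()
config = build_obj.config_environment  # raises if configure has not been run

reader = BuildReader(config)
emitter = TreeMetadataEmitter(config)
backend = TestManifestBackend(config)

# read_topsrcdir() yields moz.build contexts lazily; emit() turns them into
# metadata objects, and the backend consumes them to write its outputs.
contexts = reader.read_topsrcdir()
data = emitter.emit(contexts, emitfn=emitter._process_test_manifests)
backend.consume(data)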
Example #22
def resolver(request, tmpdir, monkeypatch, topsrcdir, all_tests, defaults):
    topobjdir = tmpdir.mkdir("objdir").strpath
    loader_cls = request.param

    if loader_cls == BuildBackendLoader:
        with open(os.path.join(topobjdir, "all-tests.pkl"), "wb") as fh:
            pickle.dump(all_tests, fh)
        with open(os.path.join(topobjdir, "test-defaults.pkl"), "wb") as fh:
            pickle.dump(defaults, fh)

        # The mock data already exists, so prevent BuildBackendLoader from regenerating
        # the build information from the whole gecko tree...
        class BuildBackendLoaderNeverOutOfDate(BuildBackendLoader):
            def backend_out_of_date(self, backend_file):
                return False

        loader_cls = BuildBackendLoaderNeverOutOfDate

    # Patch WPT's manifestupdate.run to return tests based on the contents of
    # 'data/srcdir/wpt_manifest_data.json'.
    monkeypatch.setattr(manifestupdate, "run", fake_wpt_manifestupdate)

    resolver = TestResolver(topsrcdir,
                            None,
                            None,
                            topobjdir=topobjdir,
                            loader_cls=loader_cls)
    resolver._puppeteer_loaded = True

    if loader_cls == TestManifestLoader:
        config = MockConfig(topsrcdir)
        resolver.load_tests.reader = BuildReader(config)
    return resolver
Example #23
def resolver(request, tmpdir, topsrcdir, all_tests, defaults):
    topobjdir = tmpdir.mkdir("objdir").strpath
    loader_cls = request.param

    if loader_cls == BuildBackendLoader:
        with open(os.path.join(topobjdir, 'all-tests.pkl'), 'wb') as fh:
            pickle.dump(all_tests, fh)
        with open(os.path.join(topobjdir, 'test-defaults.pkl'), 'wb') as fh:
            pickle.dump(defaults, fh)

        # The mock data already exists, so prevent BuildBackendLoader from regenerating
        # the build information from the whole gecko tree...
        class BuildBackendLoaderNeverOutOfDate(BuildBackendLoader):
            def backend_out_of_date(self, backend_file):
                return False

        loader_cls = BuildBackendLoaderNeverOutOfDate

    resolver = TestResolver(topsrcdir,
                            None,
                            None,
                            topobjdir=topobjdir,
                            loader_cls=loader_cls)
    resolver._puppeteer_loaded = True
    resolver._wpt_loaded = True

    if loader_cls == TestManifestLoader:
        config = MockConfig(topsrcdir)
        resolver.load_tests.reader = BuildReader(config)
    return resolver
Example #24
def read_build_config(docdir):
    """Read the active build config and return the relevant doc paths.

    The return value is cached so re-generating with the same docdir won't
    invoke the build system a second time."""
    trees = {}
    python_package_dirs = set()

    is_main = docdir == MAIN_DOC_PATH
    relevant_mozbuild_path = None if is_main else docdir

    # Reading the Sphinx variables doesn't require a full build context.
    # Only define the parts we need.
    class fakeconfig(object):
        topsrcdir = build.topsrcdir

    variables = ("SPHINX_TREES", "SPHINX_PYTHON_PACKAGE_DIRS")
    reader = BuildReader(fakeconfig())
    result = reader.find_variables_from_ast(variables,
                                            path=relevant_mozbuild_path)
    for path, name, key, value in result:
        reldir = os.path.dirname(path)

        if name == "SPHINX_TREES":
            # If we're building a subtree, only process that specific subtree.
            # topsrcdir always uses POSIX-style path, normalize it for proper comparison.
            absdir = os.path.normpath(
                os.path.join(build.topsrcdir, reldir, value))
            if not is_main and absdir not in (docdir, MAIN_DOC_PATH):
                continue

            assert key
            if key.startswith("/"):
                key = key[1:]
            else:
                key = os.path.normpath(os.path.join(reldir, key))

            if key in trees:
                raise Exception(
                    "%s has already been registered as a destination." % key)
            trees[key] = os.path.join(reldir, value)

        if name == "SPHINX_PYTHON_PACKAGE_DIRS":
            python_package_dirs.add(os.path.join(reldir, value))

    return trees, python_package_dirs
Example #25
    def _get_reader(self, finder):
        from mozbuild.frontend.reader import (
            BuildReader,
            EmptyConfig,
        )

        config = EmptyConfig(self.topsrcdir)
        return BuildReader(config, finder=finder)
Example #27
    def test_mtime_no_change(self):
        """Ensure mtime is not updated if file content does not change."""

        env = self._consume('stub0', RecursiveMakeBackend)

        makefile_path = mozpath.join(env.topobjdir, 'Makefile')
        backend_path = mozpath.join(env.topobjdir, 'backend.mk')
        makefile_mtime = os.path.getmtime(makefile_path)
        backend_mtime = os.path.getmtime(backend_path)

        reader = BuildReader(env)
        emitter = TreeMetadataEmitter(env)
        backend = RecursiveMakeBackend(env)
        backend.consume(emitter.emit(reader.read_topsrcdir()))

        self.assertEqual(os.path.getmtime(makefile_path), makefile_mtime)
        self.assertEqual(os.path.getmtime(backend_path), backend_mtime)
Example #28
    def get_outgoing_metadata(self):
        paths, tags, flavors = set(), set(), set()
        changed_files = self.vcs.get_outgoing_files('AM')
        if changed_files:
            config = EmptyConfig(self.topsrcdir)
            reader = BuildReader(config)
            files_info = reader.files_info(changed_files)

            for path, info in files_info.items():
                paths |= info.test_files
                tags |= info.test_tags
                flavors |= info.test_flavors

        return {
            'paths': paths,
            'tags': tags,
            'flavors': flavors,
        }
Example #30
    def reader(self, name, enable_tests=False, error_is_fatal=True, **kwargs):
        extra = {}
        if enable_tests:
            extra["ENABLE_TESTS"] = "1"
        config = self.config(name,
                             extra_substs=extra,
                             error_is_fatal=error_is_fatal)

        return BuildReader(config, **kwargs)
Example #31
    def reader(self, name, enable_tests=False):
        config = MockConfig(mozpath.join(data_path, name),
                            extra_substs=dict(
                                ENABLE_TESTS='1' if enable_tests else '',
                                BIN_SUFFIX='.prog',
                                OS_TARGET='WINNT',
                            ))

        return BuildReader(config)
Example #32
    def do_test_backend(self, *backends, **kwargs):
        topobjdir = mkdtemp()
        try:
            config = ConfigEnvironment(buildconfig.topsrcdir, topobjdir,
                                       **kwargs)
            reader = BuildReader(config)
            emitter = TreeMetadataEmitter(config)
            moz_build = mozpath.join(config.topsrcdir, 'test.mozbuild')
            definitions = list(emitter.emit(
                reader.read_mozbuild(moz_build, config)))
            for backend in backends:
                backend(config).consume(definitions)

            yield config
        except:
            raise
        finally:
            if not os.environ.get('MOZ_NO_CLEANUP'):
                shutil.rmtree(topobjdir)
Example #33
    def test_orphan_file_patterns(self):
        if sys.platform == 'win32':
            raise unittest.SkipTest('failing on windows builds')

        mb = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)

        try:
            config = mb.config_environment
        except Exception as e:
            if e.message == 'config.status not available. Run configure.':
                raise unittest.SkipTest('failing without config.status')
            raise

        if config.substs['MOZ_BUILD_APP'] == 'js':
            raise unittest.SkipTest('failing in Spidermonkey builds')

        reader = BuildReader(config)
        all_paths = self._mozbuilds(reader)
        _, contexts = reader.read_relevant_mozbuilds(all_paths)

        finder = FileFinder(config.topsrcdir, find_executables=False,
                            ignore=['obj*'])

        def pattern_exists(pat):
            return [p for p in finder.find(pat)] != []

        for ctx in contexts:
            if not isinstance(ctx, Files):
                continue
            relsrcdir = ctx.relsrcdir
            if not pattern_exists(os.path.join(relsrcdir, ctx.pattern)):
                self.fail("The pattern '%s' in a Files() entry in "
                          "'%s' corresponds to no files in the tree.\n"
                          "Please update this entry." %
                          (ctx.pattern, ctx.main_path))
            test_files = ctx['IMPACTED_TESTS'].files
            for p in test_files:
                if not pattern_exists(os.path.relpath(p.full_path, config.topsrcdir)):
                    self.fail("The pattern '%s' in a dependent tests entry "
                              "in '%s' corresponds to no files in the tree.\n"
                              "Please update this entry." %
                              (p, ctx.main_path))
Example #34
    def reader(self, name, enable_tests=False, extra_substs=None):
        substs = dict(
            ENABLE_TESTS='1' if enable_tests else '',
            BIN_SUFFIX='.prog',
            OS_TARGET='WINNT',
            COMPILE_ENVIRONMENT='1',
        )
        if extra_substs:
            substs.update(extra_substs)
        config = MockConfig(mozpath.join(data_path, name), extra_substs=substs)

        return BuildReader(config)
Example #35
    def do_test_backend(self, *backends, **kwargs):
        # Create the objdir in the srcdir to ensure that they share
        # the same drive on Windows.
        topobjdir = mkdtemp(dir=buildconfig.topsrcdir)
        try:
            config = ConfigEnvironment(buildconfig.topsrcdir, topobjdir,
                                       **kwargs)
            reader = BuildReader(config)
            emitter = TreeMetadataEmitter(config)
            moz_build = mozpath.join(config.topsrcdir, "test.mozbuild")
            definitions = list(
                emitter.emit(reader.read_mozbuild(moz_build, config)))
            for backend in backends:
                backend(config).consume(definitions)

            yield config
        except Exception:
            raise
        finally:
            if not os.environ.get("MOZ_NO_CLEANUP"):
                shutil.rmtree(topobjdir)
Example #36
def read_build_config(docdir):
    """Read the active build config and return the relevant doc paths.

    The return value is cached so re-generating with the same docdir won't
    invoke the build system a second time."""
    trees = {}
    python_package_dirs = set()

    is_main = docdir == MAIN_DOC_PATH
    relevant_mozbuild_path = None if is_main else docdir

    # Reading the Sphinx variables doesn't require a full build context.
    # Only define the parts we need.
    class fakeconfig(object):
        topsrcdir = build.topsrcdir

    reader = BuildReader(fakeconfig())
    for path, name, key, value in reader.find_sphinx_variables(relevant_mozbuild_path):
        reldir = os.path.join(os.path.dirname(path), value)

        if name == 'SPHINX_TREES':
            # If we're building a subtree, only process that specific subtree.
            absdir = os.path.join(build.topsrcdir, reldir)
            if not is_main and absdir not in (docdir, MAIN_DOC_PATH):
                continue

            assert key
            if key.startswith('/'):
                key = key[1:]
            else:
                key = os.path.join(reldir, key)

            if key in trees:
                raise Exception('%s has already been registered as a destination.' % key)
            trees[key] = reldir

        if name == 'SPHINX_PYTHON_PACKAGE_DIRS':
            python_package_dirs.add(reldir)

    return trees, python_package_dirs
Example #37
    def test_orphan_file_patterns(self):
        if sys.platform == 'win32':
            raise unittest.SkipTest('failing on windows builds')

        mb = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)

        try:
            config = mb.config_environment
        except Exception as e:
            if e.message == 'config.status not available. Run configure.':
                raise unittest.SkipTest('failing without config.status')
            raise

        reader = BuildReader(config)
        all_paths = self._mozbuilds(reader)
        _, contexts = reader.read_relevant_mozbuilds(all_paths)

        finder = FileFinder(config.topsrcdir,
                            find_executables=False,
                            ignore=['obj*'])

        def pattern_exists(pat):
            return [p for p in finder.find(pat)] != []

        for ctx in contexts:
            if not isinstance(ctx, Files):
                continue
            relsrcdir = ctx.relsrcdir
            if not pattern_exists(os.path.join(relsrcdir, ctx.pattern)):
                self.fail("The pattern '%s' in a Files() entry in "
                          "'%s' corresponds to no files in the tree.\n"
                          "Please update this entry." %
                          (ctx.pattern, ctx.main_path))
            test_files = ctx['IMPACTED_TESTS'].files
            for p in test_files:
                if not pattern_exists(
                        os.path.relpath(p.full_path, config.topsrcdir)):
                    self.fail("The pattern '%s' in a dependent tests entry "
                              "in '%s' corresponds to no files in the tree.\n"
                              "Please update this entry." % (p, ctx.main_path))
Example #38
    def build_docs(self, format=None, outdir=None):
        self._activate_virtualenv()
        self.virtualenv_manager.install_pip_package('sphinx_rtd_theme==0.1.6')

        from moztreedocs import SphinxManager

        if outdir == '<DEFAULT>':
            outdir = os.path.join(self.topobjdir, 'docs')

        manager = SphinxManager(self.topsrcdir,
                                os.path.join(self.topsrcdir, 'tools', 'docs'),
                                outdir)

        # We don't care about GYP projects, so don't process them. This makes
        # scanning faster and may even prevent an exception.
        def remove_gyp_dirs(context):
            context['GYP_DIRS'][:] = []

        # Reading the Sphinx variables doesn't require a full build context.
        # Only define the parts we need.
        class fakeconfig(object):
            def __init__(self, topsrcdir):
                self.topsrcdir = topsrcdir

        config = fakeconfig(self.topsrcdir)
        reader = BuildReader(config)

        for path, name, key, value in reader.find_sphinx_variables():
            reldir = os.path.dirname(path)

            if name == 'SPHINX_TREES':
                assert key
                manager.add_tree(os.path.join(reldir, value),
                                 os.path.join(reldir, key))

            if name == 'SPHINX_PYTHON_PACKAGE_DIRS':
                manager.add_python_package_dir(os.path.join(reldir, value))

        return manager.generate_docs(format)
Example #39
    def build_docs(self, format=None, outdir=None):
        self._activate_virtualenv()
        self.virtualenv_manager.install_pip_package('sphinx_rtd_theme==0.1.6')

        from moztreedocs import SphinxManager

        if outdir == '<DEFAULT>':
            outdir = os.path.join(self.topobjdir, 'docs')

        manager = SphinxManager(self.topsrcdir, os.path.join(self.topsrcdir,
            'tools', 'docs'), outdir)

        # We don't care about GYP projects, so don't process them. This makes
        # scanning faster and may even prevent an exception.
        def remove_gyp_dirs(context):
            context['GYP_DIRS'][:] = []

        # Reading the Sphinx variables doesn't require a full build context.
        # Only define the parts we need.
        class fakeconfig(object):
            def __init__(self, topsrcdir):
                self.topsrcdir = topsrcdir

        config = fakeconfig(self.topsrcdir)
        reader = BuildReader(config)

        for path, name, key, value in reader.find_sphinx_variables():
            reldir = os.path.dirname(path)

            if name == 'SPHINX_TREES':
                assert key
                manager.add_tree(os.path.join(reldir, value),
                        os.path.join(reldir, key))

            if name == 'SPHINX_PYTHON_PACKAGE_DIRS':
                manager.add_python_package_dir(os.path.join(reldir, value))

        return manager.generate_docs(format)
Example #40
    def build_docs(self, format=None, outdir=None):
        self._activate_virtualenv()
        self.virtualenv_manager.install_pip_package('mdn-sphinx-theme==0.4')

        from moztreedocs import SphinxManager

        if outdir == '<DEFAULT>':
            outdir = os.path.join(self.topobjdir, 'docs')

        manager = SphinxManager(self.topsrcdir, os.path.join(self.topsrcdir,
            'tools', 'docs'), outdir)

        reader = BuildReader(self.config_environment)
        for sandbox in reader.walk_topsrcdir():
            for dest_dir, source_dir in sandbox['SPHINX_TREES'].items():
                manager.add_tree(os.path.join(sandbox['RELATIVEDIR'],
                    source_dir), dest_dir)

            for entry in sandbox['SPHINX_PYTHON_PACKAGE_DIRS']:
                manager.add_python_package_dir(os.path.join(sandbox['RELATIVEDIR'],
                    entry))

        return manager.generate_docs(format)
Example #41
def gen_test_backend():
    build_obj = MozbuildObject.from_environment()
    try:
        config = build_obj.config_environment
    except BuildEnvironmentNotFoundException:
        print("No build detected, test metadata may be incomplete.")

        # If 'JS_STANDALONE' is set, tests that don't require an objdir won't
        # be picked up due to bug 1345209.
        substs = EmptyConfig.default_substs
        if 'JS_STANDALONE' in substs:
            del substs['JS_STANDALONE']

        config = EmptyConfig(build_obj.topsrcdir, substs)
        config.topobjdir = build_obj.topobjdir

    reader = BuildReader(config)
    emitter = TreeMetadataEmitter(config)
    backend = TestManifestBackend(config)

    context = reader.read_topsrcdir()
    data = emitter.emit(context, emitfn=emitter._process_test_manifests)
    backend.consume(data)
Example #42
def resolver(request, tmpdir, topsrcdir, all_tests, defaults):
    topobjdir = tmpdir.mkdir("objdir").strpath
    loader_cls = request.param

    if loader_cls == BuildBackendLoader:
        with open(os.path.join(topobjdir, 'all-tests.pkl'), 'wb') as fh:
            pickle.dump(all_tests, fh)
        with open(os.path.join(topobjdir, 'test-defaults.pkl'), 'wb') as fh:
            pickle.dump(defaults, fh)

    resolver = TestResolver(topsrcdir, None, None, topobjdir=topobjdir, loader_cls=loader_cls)
    resolver._puppeteer_loaded = True
    resolver._wpt_loaded = True

    if loader_cls == TestManifestLoader:
        config = MockConfig(topsrcdir)
        resolver.load_tests.reader = BuildReader(config)
    return resolver
Example #43
def config_status(topobjdir=".", topsrcdir=".", defines=[], non_global_defines=[], substs=[], source=None):
    """Main function, providing config.status functionality.

    Contrary to config.status, it doesn't use CONFIG_FILES or CONFIG_HEADERS
    variables.

    Without the -n option, this program acts as config.status and considers
    the current directory as the top object directory, even when config.status
    is in a different directory. It will, however, treat the directory
    containing config.status as the top object directory with the -n option.

    The --recheck option, like with the original config.status, runs configure
    again, with the options given in the "ac_configure_args" subst.

    The options to this function are passed when creating the
    ConfigEnvironment. These lists, as well as the actual wrapper script
    around this function, are meant to be generated by configure.
    See build/autoconf/config.status.m4.
    """

    if "CONFIG_FILES" in os.environ:
        raise Exception("Using the CONFIG_FILES environment variable is not " "supported.")
    if "CONFIG_HEADERS" in os.environ:
        raise Exception("Using the CONFIG_HEADERS environment variable is not " "supported.")

    if not os.path.isabs(topsrcdir):
        raise Exception("topsrcdir must be defined as an absolute directory: " "%s" % topsrcdir)

    parser = OptionParser()
    parser.add_option(
        "--recheck",
        dest="recheck",
        action="store_true",
        help="update config.status by reconfiguring in the same conditions",
    )
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true", help="display verbose output")
    parser.add_option(
        "-n",
        dest="not_topobjdir",
        action="store_true",
        help="do not consider current directory as top object directory",
    )
    parser.add_option("-d", "--diff", action="store_true", help="print diffs of changed files.")
    parser.add_option(
        "-b",
        "--backend",
        choices=["RecursiveMake", "AndroidEclipse", "CppEclipse", "VisualStudio", "Build", "ForceBuild"],
        default="Build",
        help="what backend to build (default: RecursiveMake).",
    )
    options, args = parser.parse_args()

    # Without -n, the current directory is meant to be the top object directory
    if not options.not_topobjdir:
        topobjdir = os.path.abspath(".")

    env = ConfigEnvironment(
        topsrcdir, topobjdir, defines=defines, non_global_defines=non_global_defines, substs=substs, source=source
    )

    # mozinfo.json only needs written if configure changes and configure always
    # passes this environment variable.
    if "WRITE_MOZINFO" in os.environ:
        write_mozinfo(os.path.join(topobjdir, "mozinfo.json"), env, os.environ)

    # Make an appropriate backend instance, defaulting to RecursiveMakeBackend.
    backend_cls = RecursiveMakeBackend
    if options.backend == "AndroidEclipse":
        from mozbuild.backend.android_eclipse import AndroidEclipseBackend

        if not MachCommandConditions.is_android(env):
            raise Exception("The Android Eclipse backend is not available with this configuration.")
        backend_cls = AndroidEclipseBackend
    elif options.backend == "CppEclipse":
        from mozbuild.backend.cpp_eclipse import CppEclipseBackend

        backend_cls = CppEclipseBackend
        if os.name == "nt":
            raise Exception("Eclipse is not supported on Windows. Consider using Visual Studio instead.")
    elif options.backend == "VisualStudio":
        from mozbuild.backend.visualstudio import VisualStudioBackend

        backend_cls = VisualStudioBackend
    elif options.backend == "Build":
        from mozbuild.backend.visualstudio import VisualStudioBackend

        backend_cls = VisualStudioBackend
    elif options.backend == "ForceBuild":
        from mozbuild.backend.visualstudio import VisualStudioBackend

        backend_cls = VisualStudioBackend

    the_backend = backend_cls(env)
    if options.backend == "Build":
        if the_backend.try_build():
            return
    elif options.backend == "ForceBuild":
        the_backend.full_build()

    reader = BuildReader(env)
    emitter = TreeMetadataEmitter(env)
    # This won't actually do anything because of the magic of generators.
    definitions = emitter.emit(reader.read_topsrcdir())

    if options.recheck:
        # Execute configure from the top object directory
        os.chdir(topobjdir)
        os.execlp(
            "sh",
            "sh",
            "-c",
            " ".join(
                [os.path.join(topsrcdir, "configure"), env.substs["ac_configure_args"], "--no-create", "--no-recursion"]
            ),
        )

    log_level = logging.DEBUG if options.verbose else logging.INFO
    log_manager.add_terminal_logging(level=log_level)
    log_manager.enable_unstructured()

    print("Reticulating splines...", file=sys.stderr)
    summary = the_backend.consume(definitions)

    for line in summary.summaries():
        print(line, file=sys.stderr)

    if options.diff:
        for path, diff in sorted(summary.file_diffs.items()):
            print("\n".join(diff))

    # Advertise Visual Studio if appropriate.
    if os.name == "nt" and options.backend == "RecursiveMake":
        print(VISUAL_STUDIO_ADVERTISEMENT)

    # Advertise Eclipse if it is appropriate.
    if MachCommandConditions.is_android(env):
        if options.backend == "RecursiveMake":
            print(ANDROID_IDE_ADVERTISEMENT)
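For context on how config_status() above is meant to be driven: configure writes a small wrapper script that imports the function and calls it with the configure results (see build/autoconf/config.status.m4, referenced in the docstring). The sketch below is purely illustrative, not the generated wrapper; the module path is an assumption, all values are placeholders, and the exact keyword arguments vary between the revisions shown in Examples #43 through #45.

# Illustrative stand-in for the generated config.status wrapper; placeholders only.
# Module path assumed; the examples above only show the function body.
from mozbuild.config_status import config_status

config_status(
    topobjdir='.',                              # current directory as the objdir
    topsrcdir='/abs/path/to/mozilla-central',   # must be absolute (placeholder)
    defines=[('MOZILLA_OFFICIAL', '1')],        # placeholder configure output
    substs=[('MOZ_BUILD_APP', 'browser')],      # placeholder configure output
)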
Example #44
def config_status(topobjdir='.', topsrcdir='.',
        defines=[], non_global_defines=[], substs=[], source=None):
    '''Main function, providing config.status functionality.

    Contrary to config.status, it doesn't use CONFIG_FILES or CONFIG_HEADERS
    variables.

    Without the -n option, this program acts as config.status and considers
    the current directory as the top object directory, even when config.status
    is in a different directory. It will, however, treat the directory
    containing config.status as the top object directory with the -n option.

    The --recheck option, like with the original config.status, runs configure
    again, with the options given in the "ac_configure_args" subst.

    The options to this function are passed when creating the
    ConfigEnvironment. These lists, as well as the actual wrapper script
    around this function, are meant to be generated by configure.
    See build/autoconf/config.status.m4.
    '''

    if 'CONFIG_FILES' in os.environ:
        raise Exception('Using the CONFIG_FILES environment variable is not '
            'supported.')
    if 'CONFIG_HEADERS' in os.environ:
        raise Exception('Using the CONFIG_HEADERS environment variable is not '
            'supported.')

    if not os.path.isabs(topsrcdir):
        raise Exception('topsrcdir must be defined as an absolute directory: '
            '%s' % topsrcdir)

    parser = OptionParser()
    parser.add_option('--recheck', dest='recheck', action='store_true',
                      help='update config.status by reconfiguring in the same conditions')
    parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
                      help='display verbose output')
    parser.add_option('-n', dest='not_topobjdir', action='store_true',
                      help='do not consider current directory as top object directory')
    parser.add_option('-d', '--diff', action='store_true',
                      help='print diffs of changed files.')
    parser.add_option('-b', '--backend',
                      choices=['RecursiveMake', 'AndroidEclipse', 'CppEclipse', 'VisualStudio'],
                      default='RecursiveMake',
                      help='what backend to build (default: RecursiveMake).')
    options, args = parser.parse_args()

    # Without -n, the current directory is meant to be the top object directory
    if not options.not_topobjdir:
        topobjdir = os.path.abspath('.')

    env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
            non_global_defines=non_global_defines, substs=substs, source=source)

    # mozinfo.json only needs written if configure changes and configure always
    # passes this environment variable.
    if 'WRITE_MOZINFO' in os.environ:
        write_mozinfo(os.path.join(topobjdir, 'mozinfo.json'), env, os.environ)

    # Make an appropriate backend instance, defaulting to RecursiveMakeBackend.
    backend_cls = RecursiveMakeBackend
    if options.backend == 'AndroidEclipse':
        from mozbuild.backend.android_eclipse import AndroidEclipseBackend
        if not MachCommandConditions.is_android(env):
            raise Exception('The Android Eclipse backend is not available with this configuration.')
        backend_cls = AndroidEclipseBackend
    elif options.backend == 'CppEclipse':
        from mozbuild.backend.cpp_eclipse import CppEclipseBackend
        backend_cls = CppEclipseBackend
        if os.name == 'nt':
          raise Exception('Eclipse is not supported on Windows. Consider using Visual Studio instead.')
    elif options.backend == 'VisualStudio':
        from mozbuild.backend.visualstudio import VisualStudioBackend
        backend_cls = VisualStudioBackend

    the_backend = backend_cls(env)

    reader = BuildReader(env)
    emitter = TreeMetadataEmitter(env)
    # This won't actually do anything because of the magic of generators.
    definitions = emitter.emit(reader.read_topsrcdir())

    if options.recheck:
        # Execute configure from the top object directory
        os.chdir(topobjdir)
        os.execlp('sh', 'sh', '-c', ' '.join([os.path.join(topsrcdir, 'configure'), env.substs['ac_configure_args'], '--no-create', '--no-recursion']))

    log_level = logging.DEBUG if options.verbose else logging.INFO
    log_manager.add_terminal_logging(level=log_level)
    log_manager.enable_unstructured()

    print('Reticulating splines...', file=sys.stderr)
    summary = the_backend.consume(definitions)

    for line in summary.summaries():
        print(line, file=sys.stderr)

    if options.diff:
        for path, diff in sorted(summary.file_diffs.items()):
            print('\n'.join(diff))

    # Advertise Visual Studio if appropriate.
    if os.name == 'nt' and options.backend == 'RecursiveMake':
        print(VISUAL_STUDIO_ADVERTISEMENT)

    # Advertise Eclipse if it is appropriate.
    if MachCommandConditions.is_android(env):
        if options.backend == 'RecursiveMake':
            print(ANDROID_IDE_ADVERTISEMENT)
Example #45
def config_status(topobjdir='.', topsrcdir='.', defines=None,
                  non_global_defines=None, substs=None, source=None,
                  mozconfig=None):
    '''Main function, providing config.status functionality.

    Contrary to config.status, it doesn't use CONFIG_FILES or CONFIG_HEADERS
    variables.

    Without the -n option, this program acts as config.status and considers
    the current directory as the top object directory, even when config.status
    is in a different directory. It will, however, treat the directory
    containing config.status as the top object directory with the -n option.

    The options to this function are passed when creating the
    ConfigEnvironment. These lists, as well as the actual wrapper script
    around this function, are meant to be generated by configure.
    See build/autoconf/config.status.m4.
    '''

    if 'CONFIG_FILES' in os.environ:
        raise Exception('Using the CONFIG_FILES environment variable is not '
            'supported.')
    if 'CONFIG_HEADERS' in os.environ:
        raise Exception('Using the CONFIG_HEADERS environment variable is not '
            'supported.')

    if not os.path.isabs(topsrcdir):
        raise Exception('topsrcdir must be defined as an absolute directory: '
            '%s' % topsrcdir)

    default_backends = ['RecursiveMake']
    default_backends = (substs or {}).get('BUILD_BACKENDS', ['RecursiveMake'])

    parser = ArgumentParser()
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='display verbose output')
    parser.add_argument('-n', dest='not_topobjdir', action='store_true',
                        help='do not consider current directory as top object directory')
    parser.add_argument('-d', '--diff', action='store_true',
                        help='print diffs of changed files.')
    parser.add_argument('-b', '--backend', nargs='+', choices=sorted(backends),
                        default=default_backends,
                        help='what backend to build (default: %s).' %
                        ' '.join(default_backends))
    parser.add_argument('--dry-run', action='store_true',
                        help='do everything except writing files out.')
    options = parser.parse_args()

    # Without -n, the current directory is meant to be the top object directory
    if not options.not_topobjdir:
        topobjdir = os.path.abspath('.')

    env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
            non_global_defines=non_global_defines, substs=substs,
            source=source, mozconfig=mozconfig)

    # mozinfo.json only needs written if configure changes and configure always
    # passes this environment variable.
    if 'WRITE_MOZINFO' in os.environ:
        write_mozinfo(os.path.join(topobjdir, 'mozinfo.json'), env, os.environ)

    cpu_start = time.clock()
    time_start = time.time()

    # Make appropriate backend instances, defaulting to RecursiveMakeBackend,
    # or what is in BUILD_BACKENDS.
    selected_backends = [get_backend_class(b)(env) for b in options.backend]

    if options.dry_run:
        for b in selected_backends:
            b.dry_run = True

    reader = BuildReader(env)
    emitter = TreeMetadataEmitter(env)
    # This won't actually do anything because of the magic of generators.
    definitions = emitter.emit(reader.read_topsrcdir())

    log_level = logging.DEBUG if options.verbose else logging.INFO
    log_manager.add_terminal_logging(level=log_level)
    log_manager.enable_unstructured()

    print('Reticulating splines...', file=sys.stderr)
    if len(selected_backends) > 1:
        definitions = list(definitions)

    for the_backend in selected_backends:
        the_backend.consume(definitions)

    execution_time = 0.0
    for obj in chain((reader, emitter), selected_backends):
        summary = obj.summary()
        print(summary, file=sys.stderr)
        execution_time += summary.execution_time

    cpu_time = time.clock() - cpu_start
    wall_time = time.time() - time_start
    efficiency = cpu_time / wall_time if wall_time else 100
    untracked = wall_time - execution_time

    print(
        'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: '
        '{:.0%}; Untracked: {:.2f}s'.format(
            wall_time, cpu_time, efficiency, untracked),
        file=sys.stderr
    )

    if options.diff:
        for the_backend in selected_backends:
            for path, diff in sorted(the_backend.file_diffs.items()):
                print('\n'.join(diff))

    # Advertise Visual Studio if appropriate.
    if os.name == 'nt' and 'VisualStudio' not in options.backend:
        print(VISUAL_STUDIO_ADVERTISEMENT)

    # Advertise Eclipse if it is appropriate.
    if MachCommandConditions.is_android(env):
        if 'AndroidEclipse' not in options.backend:
            print(ANDROID_IDE_ADVERTISEMENT)
Example #46
    def test(self, what):
        """Run tests from names or paths.

        mach test accepts arguments specifying which tests to run. Each argument
        can be:

        * The path to a test file
        * A directory containing tests
        * A test suite name
        * An alias to a test suite name (codes used on TreeHerder)

        If no input is provided, tests will be run based on files changed in
        the local tree. Relevant tests, tags, or flavors are determined by
        IMPACTED_TESTS annotations in moz.build files relevant to the
        changed files.

        When paths or directories are given, they are first resolved to test
        files known to the build system.

        If resolved tests belong to more than one test type/flavor/harness,
        the harness for each relevant type/flavor will be invoked. e.g. if
        you specify a directory with xpcshell and browser chrome mochitests,
        both harnesses will be invoked.
        """
        from mozbuild.testing import TestResolver

        # Parse arguments and assemble a test "plan."
        run_suites = set()
        run_tests = []
        resolver = self._spawn(TestResolver)

        for entry in what:
            # If the path matches the name or alias of an entire suite, run
            # the entire suite.
            if entry in TEST_SUITES:
                run_suites.add(entry)
                continue
            suitefound = False
            for suite, v in TEST_SUITES.items():
                if entry in v.get('aliases', []):
                    run_suites.add(suite)
                    suitefound = True
            if suitefound:
                continue

            # Now look for file/directory matches in the TestResolver.
            relpath = self._wrap_path_argument(entry).relpath()
            tests = list(resolver.resolve_tests(paths=[relpath]))
            run_tests.extend(tests)

            if not tests:
                print('UNKNOWN TEST: %s' % entry, file=sys.stderr)

        if not what:
            # TODO: This isn't really related to try, and should be
            # extracted to a common library for vcs interactions when it is
            # introduced in bug 1185599.
            from autotry import AutoTry
            at = AutoTry(self.topsrcdir, resolver, self._mach_context)
            changed_files = at.find_changed_files()
            if changed_files:
                print("Tests will be run based on modifications to the "
                      "following files:\n\t%s" % "\n\t".join(changed_files))

            from mozbuild.frontend.reader import (
                BuildReader,
                EmptyConfig,
            )
            config = EmptyConfig(self.topsrcdir)
            reader = BuildReader(config)
            files_info = reader.files_info(changed_files)

            paths, tags, flavors = set(), set(), set()
            for info in files_info.values():
                paths |= info.test_files
                tags |= info.test_tags
                flavors |= info.test_flavors

            # This requires multiple calls to resolve_tests, because the test
            # resolver returns tests that match every condition, while we want
            # tests that match any condition. Bug 1210213 tracks implementing
            # more flexible querying.
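            # For example, with tags={'devtools'} and paths={'dom/indexedDB'}
            # (hypothetical values), a single resolve_tests(tags=..., paths=...)
            # call would only return tests matching *both* conditions; the
            # separate calls below approximate the union of the result sets.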
            if tags:
                run_tests = list(resolver.resolve_tests(tags=tags))
            if paths:
                run_tests += [t for t in resolver.resolve_tests(paths=paths)
                              if not (tags & set(t.get('tags', '').split()))]
            if flavors:
                run_tests = [t for t in run_tests if t['flavor'] not in flavors]
                for flavor in flavors:
                    run_tests += list(resolver.resolve_tests(flavor=flavor))

        if not run_suites and not run_tests:
            print(UNKNOWN_TEST)
            return 1

        status = None
        for suite_name in run_suites:
            suite = TEST_SUITES[suite_name]

            if 'mach_command' in suite:
                res = self._mach_context.commands.dispatch(
                    suite['mach_command'], self._mach_context,
                    **suite['kwargs'])
                if res:
                    status = res

        buckets = {}
        for test in run_tests:
            key = (test['flavor'], test['subsuite'])
            buckets.setdefault(key, []).append(test)
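        # buckets now maps (flavor, subsuite) pairs to lists of test objects,
        # e.g. {('xpcshell', ''): [...], ('mochitest', 'devtools'): [...]}
        # (illustrative keys only).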

        for (flavor, subsuite), tests in sorted(buckets.items()):
            if flavor not in TEST_FLAVORS:
                print(UNKNOWN_FLAVOR % flavor)
                status = 1
                continue

            m = TEST_FLAVORS[flavor]
            if 'mach_command' not in m:
                print(UNKNOWN_FLAVOR % flavor)
                status = 1
                continue

            kwargs = dict(m['kwargs'])
            kwargs['subsuite'] = subsuite

            res = self._mach_context.commands.dispatch(
                    m['mach_command'], self._mach_context,
                    test_objects=tests, **kwargs)
            if res:
                status = res

        return status
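
The argument-classification loop above first checks whether an entry names an
entire suite or one of its TreeHerder aliases before falling back to path
resolution. A minimal, self-contained sketch of that lookup; the TOY_SUITES
entries and the classify helper are illustrative inventions, not part of this
module (the real table is the TEST_SUITES mapping referenced above):

TOY_SUITES = {
    'xpcshell': {'aliases': ['X']},
    'mochitest-browser-chrome': {'aliases': ['bc', 'browser-chrome']},
}

def classify(entry, suites=TOY_SUITES):
    """Return the set of suites selected by an argument, or None if the
    argument should instead be resolved as a file or directory path."""
    if entry in suites:
        return {entry}
    matches = {name for name, v in suites.items()
               if entry in v.get('aliases', [])}
    return matches or None

# classify('bc') -> {'mochitest-browser-chrome'}
# classify('dom/indexedDB') -> None, so the entry falls through to TestResolver
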
Example #47
def config_status(topobjdir='.', topsrcdir='.',
        defines=[], non_global_defines=[], substs=[], source=None):
    '''Main function, providing config.status functionality.

    Contrary to config.status, it doesn't use CONFIG_FILES or CONFIG_HEADERS
    variables.

    Without the -n option, this program acts as config.status and considers
    the current directory as the top object directory, even when config.status
    is in a different directory. It will, however, treat the directory
    containing config.status as the top object directory with the -n option.

    The --recheck option, like with the original config.status, runs configure
    again, with the options given in the "ac_configure_args" subst.

    The options to this function are passed when creating the
    ConfigEnvironment. These lists, as well as the actual wrapper script
    around this function, are meant to be generated by configure.
    See build/autoconf/config.status.m4.
    '''

    if 'CONFIG_FILES' in os.environ:
        raise Exception('Using the CONFIG_FILES environment variable is not '
            'supported.')
    if 'CONFIG_HEADERS' in os.environ:
        raise Exception('Using the CONFIG_HEADERS environment variable is not '
            'supported.')

    if not os.path.isabs(topsrcdir):
        raise Exception('topsrcdir must be defined as an absolute directory: '
            '%s' % topsrcdir)

    parser = OptionParser()
    parser.add_option('--recheck', dest='recheck', action='store_true',
                      help='update config.status by reconfiguring in the same conditions')
    parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
                      help='display verbose output')
    parser.add_option('-n', dest='not_topobjdir', action='store_true',
                      help='do not consider current directory as top object directory')
    parser.add_option('-d', '--diff', action='store_true',
                      help='print diffs of changed files.')
    options, args = parser.parse_args()

    # Without -n, the current directory is meant to be the top object directory
    if not options.not_topobjdir:
        topobjdir = os.path.abspath('.')

    env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
            non_global_defines=non_global_defines, substs=substs, source=source)

    # mozinfo.json only needs to be written if configure changes, and configure
    # always passes this environment variable.
    if 'WRITE_MOZINFO' in os.environ:
        write_mozinfo(os.path.join(topobjdir, 'mozinfo.json'), env, os.environ)

    reader = BuildReader(env)
    emitter = TreeMetadataEmitter(env)
    backend = RecursiveMakeBackend(env)
    # This won't actually do anything because of the magic of generators.
    definitions = emitter.emit(reader.read_topsrcdir())

    if options.recheck:
        # Execute configure from the top object directory
        os.chdir(topobjdir)
        os.execlp('sh', 'sh', '-c', ' '.join([os.path.join(topsrcdir, 'configure'), env.substs['ac_configure_args'], '--no-create', '--no-recursion']))

    log_level = logging.DEBUG if options.verbose else logging.INFO
    log_manager.add_terminal_logging(level=log_level)
    log_manager.enable_unstructured()

    print('Reticulating splines...', file=sys.stderr)
    summary = backend.consume(definitions)

    for line in summary.summaries():
        print(line, file=sys.stderr)

    if options.diff:
        for path, diff in sorted(summary.file_diffs.items()):
            print(diff)
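
The docstring above mirrors config.status's command-line behaviour. A few
illustrative invocations of the configure-generated wrapper script ($OBJDIR is
a placeholder object-directory path, not a real variable used by this code):

#   cd $OBJDIR && ./config.status     # the current directory is treated as topobjdir
#   ./config.status -n                # use config.status's own directory as topobjdir
#   ./config.status --recheck         # re-run configure with ac_configure_args
#   ./config.status -d                # also print diffs of the regenerated files
#   ./config.status -v                # verbose (DEBUG-level) logging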
Example #48
def config_status(topobjdir='.', topsrcdir='.',
        defines=[], non_global_defines=[], substs=[], source=None):
    '''Main function, providing config.status functionality.

    Contrary to config.status, it doesn't use CONFIG_FILES or CONFIG_HEADERS
    variables.

    Without the -n option, this program acts as config.status and considers
    the current directory as the top object directory, even when config.status
    is in a different directory. It will, however, treat the directory
    containing config.status as the top object directory with the -n option.

    The --recheck option, like with the original config.status, runs configure
    again, with the options given in the "ac_configure_args" subst.

    The options to this function are passed when creating the
    ConfigEnvironment. These lists, as well as the actual wrapper script
    around this function, are meant to be generated by configure.
    See build/autoconf/config.status.m4.
    '''

    if 'CONFIG_FILES' in os.environ:
        raise Exception('Using the CONFIG_FILES environment variable is not '
            'supported.')
    if 'CONFIG_HEADERS' in os.environ:
        raise Exception('Using the CONFIG_HEADERS environment variable is not '
            'supported.')

    if not os.path.isabs(topsrcdir):
        raise Exception('topsrcdir must be defined as an absolute directory: '
            '%s' % topsrcdir)

    default_backends = ['RecursiveMake']
    # We have a chicken/egg problem, where we only have a dict for substs after
    # creating the ConfigEnvironment, which requires argument parsing to have
    # occurred.
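    # At this point substs is still a plain list of (name, value) pairs,
    # e.g. [('BUILD_BACKENDS', ['RecursiveMake', 'FasterMake']), ...]
    # (illustrative values), hence the linear scan below.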
    for name, value in substs:
        if name == 'BUILD_BACKENDS':
            default_backends = value
            break

    parser = ArgumentParser()
    parser.add_argument('--recheck', dest='recheck', action='store_true',
                        help='update config.status by reconfiguring in the same conditions')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='display verbose output')
    parser.add_argument('-n', dest='not_topobjdir', action='store_true',
                        help='do not consider current directory as top object directory')
    parser.add_argument('-d', '--diff', action='store_true',
                        help='print diffs of changed files.')
    parser.add_argument('-b', '--backend', nargs='+',
                        choices=['RecursiveMake', 'AndroidEclipse', 'CppEclipse',
                                 'VisualStudio', 'FasterMake', 'CompileDB'],
                        default=default_backends,
                        help='what backend to build (default: %s).' %
                        ' '.join(default_backends))
    options = parser.parse_args()

    # Without -n, the current directory is meant to be the top object directory
    if not options.not_topobjdir:
        topobjdir = os.path.abspath('.')

    env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
            non_global_defines=non_global_defines, substs=substs, source=source)

    # mozinfo.json only needs to be written if configure changes, and configure
    # always passes this environment variable.
    if 'WRITE_MOZINFO' in os.environ:
        write_mozinfo(os.path.join(topobjdir, 'mozinfo.json'), env, os.environ)

    # Make an appropriate backend instance, defaulting to RecursiveMakeBackend.
    backends_cls = []
    for backend in options.backend:
        if backend == 'AndroidEclipse':
            from mozbuild.backend.android_eclipse import AndroidEclipseBackend
            if not MachCommandConditions.is_android(env):
                raise Exception('The Android Eclipse backend is not available with this configuration.')
            backends_cls.append(AndroidEclipseBackend)
        elif backend == 'CppEclipse':
            from mozbuild.backend.cpp_eclipse import CppEclipseBackend
            backends_cls.append(CppEclipseBackend)
            if os.name == 'nt':
                raise Exception('Eclipse is not supported on Windows. '
                                'Consider using Visual Studio instead.')
        elif backend == 'VisualStudio':
            from mozbuild.backend.visualstudio import VisualStudioBackend
            backends_cls.append(VisualStudioBackend)
        elif backend == 'FasterMake':
            from mozbuild.backend.fastermake import FasterMakeBackend
            backends_cls.append(FasterMakeBackend)
        elif backend == 'CompileDB':
            from mozbuild.compilation.database import CompileDBBackend
            backends_cls.append(CompileDBBackend)
        else:
            backends_cls.append(RecursiveMakeBackend)

    cpu_start = time.clock()
    time_start = time.time()

    backends = [cls(env) for cls in backends_cls]

    reader = BuildReader(env)
    emitter = TreeMetadataEmitter(env)
    # This won't actually do anything because of the magic of generators.
    definitions = emitter.emit(reader.read_topsrcdir())

    if options.recheck:
        # Execute configure from the top object directory
        os.chdir(topobjdir)
        os.execlp('sh', 'sh', '-c', ' '.join([os.path.join(topsrcdir, 'configure'), env.substs['ac_configure_args'], '--no-create', '--no-recursion']))

    log_level = logging.DEBUG if options.verbose else logging.INFO
    log_manager.add_terminal_logging(level=log_level)
    log_manager.enable_unstructured()

    print('Reticulating splines...', file=sys.stderr)
    if len(backends) > 1:
        definitions = list(definitions)
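        # emitter.emit() returned a generator, which can only be consumed once;
        # materialize it so every selected backend sees the full set of objects.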

    for the_backend in backends:
        the_backend.consume(definitions)

    execution_time = 0.0
    for obj in chain((reader, emitter), backends):
        summary = obj.summary()
        print(summary, file=sys.stderr)
        execution_time += summary.execution_time

    cpu_time = time.clock() - cpu_start
    wall_time = time.time() - time_start
    efficiency = cpu_time / wall_time if wall_time else 1.0
    untracked = wall_time - execution_time
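    # Illustrative numbers: 8.0s of CPU time over 10.0s of wall time is 80%
    # efficiency; 'untracked' is the wall time not accounted for by the
    # reader, emitter, or backend summaries collected above.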

    print(
        'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: '
        '{:.0%}; Untracked: {:.2f}s'.format(
            wall_time, cpu_time, efficiency, untracked),
        file=sys.stderr
    )

    if options.diff:
        for the_backend in backends:
            for path, diff in sorted(the_backend.file_diffs.items()):
                print('\n'.join(diff))

    # Advertise Visual Studio if appropriate.
    if os.name == 'nt' and 'VisualStudio' not in options.backend:
        print(VISUAL_STUDIO_ADVERTISEMENT)

    # Advertise Eclipse if it is appropriate.
    if MachCommandConditions.is_android(env):
        if 'AndroidEclipse' not in options.backend:
            print(ANDROID_IDE_ADVERTISEMENT)

    if env.substs.get('MOZ_ARTIFACT_BUILDS', False):
        # Execute |mach artifact install| from the top source directory.
        os.chdir(topsrcdir)
        return subprocess.check_call([sys.executable, os.path.join(topsrcdir, 'mach'), 'artifact', 'install'])
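
This revision adds the -b/--backend flag, which accepts one or more backend
names and falls back to the BUILD_BACKENDS subst when the flag is omitted.
A couple of illustrative invocations:

#   ./config.status -b CompileDB                   # run only the CompileDB backend
#   ./config.status -b RecursiveMake FasterMake    # consume the tree once, emit both backends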

def config_status(topobjdir='.', topsrcdir='.',
                  defines=[], non_global_defines=[], substs=[],
                  files=[], headers=[]):
    '''Main function, providing config.status functionality.

    Contrary to config.status, it doesn't use CONFIG_FILES or CONFIG_HEADERS
    variables, but like config.status from autoconf 2.6, single files may be
    generated with the --file and --header options. Several such options can
    be given to generate several files at the same time.

    Without the -n option, this program acts as config.status and considers
    the current directory as the top object directory, even when config.status
    is in a different directory. It will, however, treat the directory
    containing config.status as the top object directory with the -n option,
    while files given to the --file and --header arguments are considered
    relative to the current directory.

    The --recheck option, like with the original config.status, runs configure
    again, with the options given in the "ac_configure_args" subst.

    The options to this function are passed when creating the
    ConfigEnvironment, except for files and headers, which contain the list
    of files and headers to be generated by default. These lists, as well as
    the actual wrapper script around this function, are meant to be generated
    by configure. See build/autoconf/config.status.m4.

    Unlike config.status behaviour with CONFIG_FILES and CONFIG_HEADERS,
    but like config.status behaviour with --file and --header, providing
    files or headers on the command line inhibits the default generation of
    files when given headers and headers when given files.

    Unlike config.status, the FILE:TEMPLATE syntax is not supported for
    files and headers. The template is always the filename suffixed with
    '.in', in the corresponding directory under the top source directory.
    '''

    if 'CONFIG_FILES' in os.environ:
        raise Exception('Using the CONFIG_FILES environment variable is not '
            'supported. Use --file instead.')
    if 'CONFIG_HEADERS' in os.environ:
        raise Exception('Using the CONFIG_HEADERS environment variable is not '
            'supported. Use --header instead.')

    parser = OptionParser()
    parser.add_option('--recheck', dest='recheck', action='store_true',
                      help='update config.status by reconfiguring in the same conditions')
    parser.add_option('--file', dest='files', metavar='FILE', action='append',
                      help='instantiate the configuration file FILE')
    parser.add_option('--header', dest='headers', metavar='FILE', action='append',
                      help='instantiate the configuration header FILE')
    parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
                      help='display verbose output')
    parser.add_option('-n', dest='not_topobjdir', action='store_true',
                      help='do not consider current directory as top object directory')
    (options, args) = parser.parse_args()

    # Without -n, the current directory is meant to be the top object directory
    if not options.not_topobjdir:
        topobjdir = '.'

    env = ConfigEnvironment(topsrcdir, topobjdir, defines=defines,
            non_global_defines=non_global_defines, substs=substs)

    reader = BuildReader(env)
    emitter = TreeMetadataEmitter(env)
    backend = RecursiveMakeBackend(env)
    # This won't actually do anything because of the magic of generators.
    definitions = emitter.emit(reader.read_topsrcdir())

    if options.recheck:
        # Execute configure from the top object directory
        if not os.path.isabs(topsrcdir):
            topsrcdir = relpath(topsrcdir, topobjdir)
        os.chdir(topobjdir)
        os.execlp('sh', 'sh', '-c', ' '.join([os.path.join(topsrcdir, 'configure'), env.substs['ac_configure_args'], '--no-create', '--no-recursion']))

    if options.files:
        files = options.files
        headers = []
    if options.headers:
        headers = options.headers
        if not options.files:
            files = []
    # Default to displaying messages when --file or --header is given on the
    # command line.
    log_level = logging.INFO

    if options.files or options.headers or options.verbose:
        log_level = logging.DEBUG

    log_manager.add_terminal_logging(level=log_level)
    log_manager.enable_unstructured()

    if not options.files and not options.headers:
        print('Reticulating splines...', file=sys.stderr)
        summary = backend.consume(definitions)

        for line in summary.summaries():
            print(line, file=sys.stderr)

        files = [os.path.join(topobjdir, f) for f in files]
        headers = [os.path.join(topobjdir, f) for f in headers]

    for file in files:
        env.create_config_file(file)
    for header in headers:
        env.create_config_header(header)
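
As the docstring notes, --file and --header instantiate single files from
their '.in' templates under the top source directory, and giving one kind
inhibits the default generation of the other. A couple of illustrative
invocations (the file names below are hypothetical):

#   ./config.status --file config/autoconf.mk    # instantiated from config/autoconf.mk.in
#   ./config.status --header mozilla-config.h    # instantiated from mozilla-config.h.in
#   ./config.status                              # no --file/--header: generate the default sets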