Exemplo n.º 1
0
    def _process_reftest_manifest(self, sandbox, flavor, manifest_path):
        """Emit a TestManifest object for a reftest manifest file.

        ``manifest_path`` is resolved relative to the sandbox's SRCDIR and
        each file listed in the manifest becomes one test entry.
        """
        normed_path = mozpath.normpath(manifest_path)
        full_path = mozpath.normpath(
            mozpath.join(sandbox['SRCDIR'], normed_path))
        reldir = mozpath.dirname(
            mozpath.relpath(full_path, sandbox['TOPSRCDIR']))

        reftest_manifest = reftest.ReftestManifest()
        reftest_manifest.load(full_path)

        # reftest manifests don't come from manifest parser. But they are
        # similar enough that we can use the same emitted objects. Note
        # that we don't perform any installs for reftests.
        obj = TestManifest(
            sandbox, full_path, reftest_manifest,
            flavor=flavor,
            install_prefix='%s/' % flavor,
            relpath=mozpath.join(reldir, mozpath.basename(normed_path)))

        for test_path in sorted(reftest_manifest.files):
            entry = {
                'path': test_path,
                'here': mozpath.dirname(test_path),
                'manifest': full_path,
                'name': mozpath.basename(test_path),
                'head': '',
                'tail': '',
                'support-files': '',
                'subsuite': '',
            }
            obj.tests.append(entry)

        yield obj
Exemplo n.º 2
0
def generate_gn_config(config, srcdir, output, non_unified_sources, gn_binary,
                       input_variables, sandbox_variables):
    """Run ``gn gen`` and capture its project description as filtered JSON.

    The GN-produced project.json is filtered via filter_gn_config() and
    written to <output>/gn-output/<arg-string>.json, where <arg-string>
    encodes the input variable values so distinct configurations do not
    collide.
    """

    def str_for_arg(v):
        # GN args syntax: booleans are bare lowercase words; everything
        # else is passed as a quoted string.
        if v in (True, False):
            return str(v).lower()
        return '"%s"' % v

    # NOTE(review): the gn_binary parameter is unused here; the binary
    # actually invoked comes from config.substs['GN'] below — confirm
    # whether the parameter is vestigial.
    # .items() behaves identically to the Python-2-only .iteritems() for
    # this use and also works on Python 3.
    gn_args = '--args=%s' % ' '.join('%s=%s' % (k, str_for_arg(v))
                                     for k, v in input_variables.items())
    gn_arg_string = '_'.join(str(input_variables[k])
                             for k in sorted(input_variables.keys()))
    out_dir = mozpath.join(output, 'gn-output')
    gen_args = [
        config.substs['GN'], 'gen', out_dir, gn_args, '--ide=json',
    ]
    print("Running \"%s\"" % ' '.join(gen_args), file=sys.stderr)
    subprocess.check_call(gen_args, cwd=srcdir, stderr=subprocess.STDOUT)

    gn_config_file = mozpath.join(out_dir, 'project.json')

    with open(gn_config_file, 'r') as fh:
        gn_out = json.load(fh)
        gn_out = filter_gn_config(gn_out, config, sandbox_variables,
                                  input_variables)

    # The raw project.json is only an intermediate; remove it once filtered.
    os.remove(gn_config_file)

    gn_out_file = mozpath.join(out_dir, gn_arg_string + '.json')
    with open(gn_out_file, 'w') as fh:
        json.dump(gn_out, fh, indent=4, sort_keys=True, separators=(',', ': '))
    print("Wrote gn config to %s" % gn_out_file)
Exemplo n.º 3
0
 def __init__(self, context, basename, cargo_file, crate_type, dependencies,
              features, target_dir, **args):
     """Describe a Rust static library built by cargo.

     :param context: moz.build context providing build configuration.
     :param basename: library base name as written in moz.build.
     :param cargo_file: path to the crate's Cargo.toml.
     :param crate_type: cargo crate type; must be 'staticlib' here.
     :param dependencies: crates this library depends on.
     :param features: cargo feature names to enable.
     :param target_dir: cargo --target-dir for build outputs.
     """
     StaticLibrary.__init__(self, context, basename, **args)
     self.cargo_file = cargo_file
     self.crate_type = crate_type
     # We need to adjust our naming here because cargo replaces '-' in
     # package names defined in Cargo.toml with underscores in actual
     # filenames. But we need to keep the basename consistent because
     # many other things in the build system depend on that.
     assert self.crate_type == 'staticlib'
     self.lib_name = '%s%s%s' % (context.config.rust_lib_prefix,
                                  basename.replace('-', '_'),
                                  context.config.rust_lib_suffix)
     self.dependencies = dependencies
     self.features = features
     self.target_dir = target_dir
     # Skip setting properties below which depend on cargo
     # when we don't have a compile environment. The required
     # config keys won't be available, but the instance variables
     # that we don't set should never be accessed by the actual
     # build in that case.
     if not context.config.substs.get('COMPILE_ENVIRONMENT'):
         return
     # import_name is the on-disk artifact the build links against;
     # deps_path holds cargo's intermediate dependency outputs.
     build_dir = mozpath.join(target_dir,
                              cargo_output_directory(context, self.TARGET_SUBST_VAR))
     self.import_name = mozpath.join(build_dir, self.lib_name)
     self.deps_path = mozpath.join(build_dir, 'deps')
Exemplo n.º 4
0
    def test_android_eclipse(self):
        """Android Eclipse projects emit the expected backend.mk rules."""
        env = self._consume('android_eclipse', RecursiveMakeBackend)

        def stripped_lines(*parts):
            # Read a backend.mk and drop trailing whitespace per line.
            with open(mozpath.join(env.topobjdir, *parts), 'rb') as fh:
                return [line.rstrip() for line in fh.readlines()]

        lines = stripped_lines('backend.mk')

        # Dependencies first.
        self.assertIn('ANDROID_ECLIPSE_PROJECT_main1: target1 target2', lines)
        self.assertIn('ANDROID_ECLIPSE_PROJECT_main4: target3 target4', lines)

        command_template = ('\t$(call py_action,process_install_manifest,'
                            '--no-remove --no-remove-all-directory-symlinks '
                            '--no-remove-empty-directories %s %s.manifest)')
        # Commands second.
        for project_name in ('main1', 'main2', 'library1', 'library2'):
            stem = '%s/android_eclipse/%s' % (env.topobjdir, project_name)
            self.assertIn(command_template % (stem, stem), lines)

        # Projects declared in subdirectories.
        lines = stripped_lines('subdir', 'backend.mk')

        self.assertIn('ANDROID_ECLIPSE_PROJECT_submain: subtarget1 subtarget2', lines)

        for project_name in ('submain', 'sublibrary'):
            # Destination and install manifest are relative to topobjdir.
            stem = '%s/android_eclipse/%s' % (env.topobjdir, project_name)
            self.assertIn(command_template % (stem, stem), lines)
Exemplo n.º 5
0
    def process_tests_artifact(self, filename, processed_filename):
        """Repackage a tests archive, keeping only entries matching the
        artifact patterns and OBJDIR_TEST_FILES layouts.

        :param filename: path of the source jar/zip archive.
        :param processed_filename: path of the archive to write.
        :raises ValueError: if no pattern matched anything (format change).
        """
        from mozbuild.action.test_archive import OBJDIR_TEST_FILES
        added_entry = False

        with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
            reader = JarReader(filename)
            # Use a distinct loop variable: the original reused 'filename',
            # shadowing the archive-path parameter.
            for entry_name, entry in reader.entries.iteritems():
                for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                    if not mozpath.match(entry_name, pattern):
                        continue
                    destpath = mozpath.relpath(entry_name, src_prefix)
                    destpath = mozpath.join(dest_prefix, destpath)
                    self.log(logging.INFO, 'artifact',
                             {'destpath': destpath},
                             'Adding {destpath} to processed archive')
                    # The upper 16 bits of external_attr carry the Unix mode.
                    mode = entry['external_attr'] >> 16
                    writer.add(destpath.encode('utf-8'), reader[entry_name], mode=mode)
                    added_entry = True
                    break
                for files_entry in OBJDIR_TEST_FILES.values():
                    origin_pattern = files_entry['pattern']
                    leaf_filename = entry_name
                    if 'dest' in files_entry:
                        dest = files_entry['dest']
                        origin_pattern = mozpath.join(dest, origin_pattern)
                        leaf_filename = entry_name[len(dest) + 1:]
                    if mozpath.match(entry_name, origin_pattern):
                        destpath = mozpath.join('..', files_entry['base'], leaf_filename)
                        mode = entry['external_attr'] >> 16
                        writer.add(destpath.encode('utf-8'), reader[entry_name], mode=mode)

        if not added_entry:
            # The trailing space before 'matched' is required: the original
            # implicit string concatenation ran '"...{patterns}"matched'
            # together into one word.
            raise ValueError('Archive format changed! No pattern from "{patterns}" '
                             'matched an archive path.'.format(
                                 patterns=LinuxArtifactJob.test_artifact_patterns))
Exemplo n.º 6
0
    def test_install_manifests_package_tests(self):
        """Ensure test suites honor package_tests=False."""
        env = self._consume('test-manifests-package-tests', RecursiveMakeBackend)

        tests_dir = mozpath.join(env.topobjdir, '_tests')

        all_tests_path = mozpath.join(env.topobjdir, 'all-tests.json')
        self.assertTrue(os.path.exists(all_tests_path))

        with open(all_tests_path, 'rt') as fh:
            all_tests = json.load(fh)
            # Both tests appear in the metadata, packaged or not.
            for name in ('mochitest.js', 'not_packaged.java'):
                self.assertIn(name, all_tests)

        man_dir = mozpath.join(env.topobjdir, '_build_manifests', 'install')
        self.assertTrue(os.path.isdir(man_dir))

        manifest_path = mozpath.join(man_dir, 'tests')
        self.assertTrue(os.path.exists(manifest_path))

        manifest = InstallManifest(path=manifest_path)

        # Only mochitest.js should be in the install manifest.
        self.assertTrue('testing/mochitest/tests/mochitest.js' in manifest)

        # The path is odd here because we do not normalize at test manifest
        # processing time.  This is a fragile test because there's currently no
        # way to iterate the manifest.
        self.assertFalse('instrumentation/./not_packaged.java' in manifest)
Exemplo n.º 7
0
    def test_test_manifests_files_written(self):
        """Ensure test manifests get turned into files."""
        env = self._consume('test-manifests-written', RecursiveMakeBackend)

        tests_dir = mozpath.join(env.topobjdir, '_tests')
        mochitest_master = mozpath.join(
            tests_dir, 'testing', 'mochitest', 'tests', 'mochitest.ini')
        xpcshell_master = mozpath.join(tests_dir, 'xpcshell', 'xpcshell.ini')
        for master in (mochitest_master, xpcshell_master):
            self.assertTrue(os.path.exists(master))

        content = [l.strip() for l in open(xpcshell_master, 'rt').readlines()]
        self.assertEqual(content, [
            '; THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.',
            '',
            '[include:dir1/xpcshell.ini]',
            '[include:xpcshell.ini]',
        ])

        all_tests_path = mozpath.join(env.topobjdir, 'all-tests.json')
        self.assertTrue(os.path.exists(all_tests_path))

        with open(all_tests_path, 'rt') as fh:
            all_tests = json.load(fh)

            self.assertIn('xpcshell.js', all_tests)
            self.assertIn('dir1/test_bar.js', all_tests)

            self.assertEqual(len(all_tests['xpcshell.js']), 1)
Exemplo n.º 8
0
    def test_final_target(self):
        """Test that FINAL_TARGET is written to backend.mk correctly."""
        env = self._consume('final_target', RecursiveMakeBackend)

        final_target_rule = "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)"
        expected = dict()
        expected[env.topobjdir] = []
        expected[mozpath.join(env.topobjdir, 'both')] = [
            'XPI_NAME = mycrazyxpi',
            'DIST_SUBDIR = asubdir',
            final_target_rule
        ]
        expected[mozpath.join(env.topobjdir, 'dist-subdir')] = [
            'DIST_SUBDIR = asubdir',
            final_target_rule
        ]
        expected[mozpath.join(env.topobjdir, 'xpi-name')] = [
            'XPI_NAME = mycrazyxpi',
            final_target_rule
        ]
        expected[mozpath.join(env.topobjdir, 'final-target')] = [
            'FINAL_TARGET = $(DEPTH)/random-final-target'
        ]
        # .items() works identically here and is portable, unlike the
        # Python-2-only .iteritems().
        for key, expected_rules in expected.items():
            backend_path = mozpath.join(key, 'backend.mk')
            # Skip the two-line generated-file header.
            lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
            # The original comprehension shadowed the builtin 'str'; use a
            # descriptive loop name instead.
            found = [line for line in lines if
                line.startswith('FINAL_TARGET') or line.startswith('XPI_NAME') or
                line.startswith('DIST_SUBDIR')]
            self.assertEqual(found, expected_rules)
Exemplo n.º 9
0
    def _binding_info(self, p):
        """Compute binding metadata for an input path.

        Returns a tuple of:

          (stem, binding_stem, is_event, header_dir, files)

        files is itself a tuple. The first two items are the binding
        header and C++ paths, respectively. The 2nd pair are the event header
        and C++ paths or None if this isn't an event binding.

        (The previous docstring omitted header_dir; the code has always
        returned five items.)
        """
        basename = mozpath.basename(p)
        stem = mozpath.splitext(basename)[0]
        binding_stem = '%sBinding' % stem

        # Exported stems publish headers to the shared export directory;
        # all other headers stay in the codegen directory.
        if stem in self._exported_stems:
            header_dir = self._exported_header_dir
        else:
            header_dir = self._codegen_dir

        is_event = stem in self._generated_events_stems

        files = (
            mozpath.join(header_dir, '%s.h' % binding_stem),
            mozpath.join(self._codegen_dir, '%s.cpp' % binding_stem),
            mozpath.join(header_dir, '%s.h' % stem) if is_event else None,
            mozpath.join(self._codegen_dir, '%s.cpp' % stem) if is_event else None,
        )

        return stem, binding_stem, is_event, header_dir, files
Exemplo n.º 10
0
 def test_basic(self):
     """Ensure the RecursiveMakeBackend works without error."""
     env = self._consume('stub0', RecursiveMakeBackend)
     # Both backend state files must exist after a successful run.
     for state_file in ('backend.RecursiveMakeBackend',
                        'backend.RecursiveMakeBackend.pp'):
         self.assertTrue(
             os.path.exists(mozpath.join(env.topobjdir, state_file)))
Exemplo n.º 11
0
    def test_final_target(self):
        """Test that FINAL_TARGET is written to backend.mk correctly."""
        env = self._consume("final_target", RecursiveMakeBackend)

        final_target_rule = (
            "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)"
        )
        expected = dict()
        expected[env.topobjdir] = []
        expected[mozpath.join(env.topobjdir, "both")] = [
            "XPI_NAME = mycrazyxpi",
            "DIST_SUBDIR = asubdir",
            final_target_rule,
        ]
        expected[mozpath.join(env.topobjdir, "dist-subdir")] = ["DIST_SUBDIR = asubdir", final_target_rule]
        expected[mozpath.join(env.topobjdir, "xpi-name")] = ["XPI_NAME = mycrazyxpi", final_target_rule]
        expected[mozpath.join(env.topobjdir, "final-target")] = ["FINAL_TARGET = $(DEPTH)/random-final-target"]
        # .items() is behavior-identical to the Python-2-only .iteritems()
        # here and also works on Python 3.
        for key, expected_rules in expected.items():
            backend_path = mozpath.join(key, "backend.mk")
            # Skip the two-line generated-file header.
            lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
            # The original comprehension shadowed the builtin 'str'.
            found = [
                line
                for line in lines
                if line.startswith("FINAL_TARGET") or line.startswith("XPI_NAME") or line.startswith("DIST_SUBDIR")
            ]
            self.assertEqual(found, expected_rules)
Exemplo n.º 12
0
    def test_android_eclipse(self):
        """Android Eclipse projects emit the expected backend.mk rules."""
        env = self._consume("android_eclipse", RecursiveMakeBackend)

        def stripped_lines(*parts):
            # Read a backend.mk and drop per-line trailing whitespace.
            with open(mozpath.join(env.topobjdir, *parts), "rb") as fh:
                return [line.rstrip() for line in fh.readlines()]

        lines = stripped_lines("backend.mk")

        # Dependencies first.
        self.assertIn("ANDROID_ECLIPSE_PROJECT_main1: target1 target2", lines)
        self.assertIn("ANDROID_ECLIPSE_PROJECT_main4: target3 target4", lines)

        command_template = (
            "\t$(call py_action,process_install_manifest,"
            "--no-remove --no-remove-all-directory-symlinks "
            "--no-remove-empty-directories %s %s.manifest)"
        )
        # Commands second.
        for project_name in ("main1", "main2", "library1", "library2"):
            stem = "%s/android_eclipse/%s" % (env.topobjdir, project_name)
            self.assertIn(command_template % (stem, stem), lines)

        # Projects declared in subdirectories.
        lines = stripped_lines("subdir", "backend.mk")

        self.assertIn("ANDROID_ECLIPSE_PROJECT_submain: subtarget1 subtarget2", lines)

        for project_name in ("submain", "sublibrary"):
            # Destination and install manifest are relative to topobjdir.
            stem = "%s/android_eclipse/%s" % (env.topobjdir, project_name)
            self.assertIn(command_template % (stem, stem), lines)
    def _get_manager_args(self):
        """Build keyword arguments for constructing a codegen manager,
        rooted in a throwaway temp directory that is cleaned up after
        the test."""
        tmp = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, tmp)

        cache_dir = mozpath.join(tmp, 'cache')
        os.mkdir(cache_dir)

        input_paths = self._static_input_paths
        # Stems are the input basenames with their extension dropped.
        stems = {mozpath.splitext(mozpath.basename(p))[0] for p in input_paths}

        return dict(
            config_path=self._config_path,
            inputs=(input_paths, stems, set(), set()),
            exported_header_dir=mozpath.join(tmp, 'exports'),
            codegen_dir=mozpath.join(tmp, 'codegen'),
            state_path=mozpath.join(tmp, 'state.json'),
            make_deps_path=mozpath.join(tmp, 'codegen.pp'),
            make_deps_target='codegen.pp',
            cache_dir=cache_dir,
        )
Exemplo n.º 14
0
    def test_test_manifests_files_written(self):
        """Ensure test manifests get turned into files."""
        env = self._consume("test-manifests-written", RecursiveMakeBackend)

        tests_dir = mozpath.join(env.topobjdir, "_tests")
        mochitest_master = mozpath.join(
            tests_dir, "testing", "mochitest", "tests", "mochitest.ini")
        xpcshell_master = mozpath.join(tests_dir, "xpcshell", "xpcshell.ini")
        for master in (mochitest_master, xpcshell_master):
            self.assertTrue(os.path.exists(master))

        content = [l.strip() for l in open(xpcshell_master, "rt").readlines()]
        self.assertEqual(
            content,
            [
                "; THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.",
                "",
                "[include:dir1/xpcshell.ini]",
                "[include:xpcshell.ini]",
            ],
        )

        all_tests_path = mozpath.join(env.topobjdir, "all-tests.json")
        self.assertTrue(os.path.exists(all_tests_path))

        with open(all_tests_path, "rt") as fh:
            all_tests = json.load(fh)

            self.assertIn("xpcshell.js", all_tests)
            self.assertIn("dir1/test_bar.js", all_tests)

            self.assertEqual(len(all_tests["xpcshell.js"]), 1)
Exemplo n.º 15
0
def get_config_files(data):
    """Discover the files config.status knows how to generate.

    Returns a (config_files, command_files) pair: config_files is a list
    of (objdir file, srcdir template) tuples, command_files a list of
    objdir files produced by configuration commands. Both are empty when
    config.status does not exist.
    """
    config_status = mozpath.join(data['objdir'], 'config.status')
    if not os.path.exists(config_status):
        return [], []

    configure = mozpath.join(data['srcdir'], 'configure')
    config_files = []
    command_files = []

    # Scan the config.status --help output for information about
    # configuration files it generates.
    help_output = subprocess.check_output(
        [data['shell'], '-c', '%s --help' % config_status],
        stderr=subprocess.STDOUT).splitlines()

    # Tiny state machine: section headers look like "Configuration ...:";
    # a blank line ends a section.
    section = None
    for line in help_output:
        if line.startswith('Configuration') and line.endswith(':'):
            section = 'commands' if line.endswith('commands:') else 'config'
        elif not line.strip():
            section = None
        elif section:
            for objfile, template in (split_template(c) for c in line.split()):
                objfile = mozpath.join(data['objdir'], objfile)
                template = mozpath.join(data['srcdir'], template)
                if section == 'commands':
                    command_files.append(objfile)
                else:
                    config_files.append((objfile, template))

    return config_files, command_files
Exemplo n.º 16
0
    def test_dirs_traversal_simple(self):
        """A simple DIRS tree yields one DirectoryTraversal per context."""
        reader = self.reader("traversal-simple")
        objs = self.read_topsrcdir(reader, filter_common=False)
        self.assertEqual(len(objs), 4)

        for obj in objs:
            self.assertIsInstance(obj, DirectoryTraversal)
            self.assertEqual(obj.test_dirs, [])
            self.assertTrue(os.path.isabs(obj.context_main_path))
            self.assertEqual(len(obj.context_all_paths), 1)

        self.assertEqual([obj.relativedir for obj in objs],
                         ["", "foo", "foo/biz", "bar"])

        self.assertEqual(objs[3].affected_tiers, {"misc"})

        topsrcdir = reader.config.topsrcdir
        expected_dirs = [
            [mozpath.join(topsrcdir, "foo"), mozpath.join(topsrcdir, "bar")],
            [mozpath.join(topsrcdir, "foo", "biz")],
            [],
            [],
        ]
        actual_dirs = [[d.full_path for d in obj.dirs] for obj in objs]
        self.assertEqual(actual_dirs, expected_dirs)
Exemplo n.º 17
0
    def consume_finished(self):
        """Finalize backend output.

        Flushes each objdir's backend file (processing any delayed
        generated files first), writes the top-level Tuprules.tup with
        common make-style variables, and runs `tup init` if no .tup
        database exists in the source tree yet.
        """
        CommonBackend.consume_finished(self)

        for objdir, backend_file in sorted(self._backend_files.items()):
            for obj in backend_file.delayed_generated_files:
                self._process_generated_file(backend_file, obj)
            # Entering/exiting the writer flushes the (possibly empty) file.
            with self._write_file(fh=backend_file):
                pass

        with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh:
            # 'name not in' replaces the non-idiomatic 'not name in'.
            acdefines = [name for name in self.environment.defines
                if name not in self.environment.non_global_defines]
            acdefines_flags = ' '.join('-D%s=%s' % (name,
                shell_quote(self.environment.defines[name]))
                for name in sorted(acdefines))
            # TODO: AB_CD only exists in Makefiles at the moment.
            acdefines_flags += ' -DAB_CD=en-US'

            fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n')
            fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n')
            fh.write('ACDEFINES = %s\n' % acdefines_flags)
            fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % (
                os.path.relpath(self.environment.topsrcdir, self.environment.topobjdir)
            ))
            fh.write('PYTHON = $(MOZ_OBJ_ROOT)/_virtualenv/bin/python -B\n')
            fh.write('PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py\n')
            fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n')
            fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n')
            fh.write('IDL_PARSER_CACHE_DIR = $(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n')

        # Run 'tup init' if necessary.
        if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")):
            tup = self.environment.substs.get('TUP', 'tup')
            self._cmd.run_process(cwd=self.environment.topsrcdir, log_name='tup', args=[tup, 'init'])
Exemplo n.º 18
0
    def _handle_idl_manager(self, manager):
        """Emit install manifest entries and make rules for XPIDL outputs.

        Registers expected build outputs (xpt files, generated headers,
        installed .idl sources) in the install manifests and writes the
        config/makefiles/xpidl/Makefile containing per-module xpt rules.
        """
        build_files = self._install_manifests['xpidl']

        # Scaffolding files the xpidl build dir is expected to contain.
        for p in ('Makefile', 'backend.mk', '.deps/.mkdir.done',
            'xpt/.mkdir.done'):
            build_files.add_optional_exists(p)

        # Install each source .idl and expect its generated header.
        for idl in manager.idls.values():
            self._install_manifests['dist_idl'].add_symlink(idl['source'],
                idl['basename'])
            self._install_manifests['dist_include'].add_optional_exists('%s.h'
                % idl['root'])

        # Expect each module's .xpt and its dependency file.
        for module in manager.modules:
            build_files.add_optional_exists(mozpath.join('xpt',
                '%s.xpt' % module))
            build_files.add_optional_exists(mozpath.join('.deps',
                '%s.pp' % module))

        modules = manager.modules
        xpt_modules = sorted(modules.keys())
        rules = []

        for module in xpt_modules:
            deps = sorted(modules[module])
            idl_deps = ['$(dist_idl_dir)/%s.idl' % dep for dep in deps]
            rules.extend([
                # It may seem strange to have the .idl files listed as
                # prerequisites both here and in the auto-generated .pp files.
                # It is necessary to list them here to handle the case where a
                # new .idl is added to an xpt. If we add a new .idl and nothing
                # else has changed, the new .idl won't be referenced anywhere
                # except in the command invocation. Therefore, the .xpt won't
                # be rebuilt because the dependencies say it is up to date. By
                # listing the .idls here, we ensure the make file has a
                # reference to the new .idl. Since the new .idl presumably has
                # an mtime newer than the .xpt, it will trigger xpt generation.
                '$(idl_xpt_dir)/%s.xpt: %s' % (module, ' '.join(idl_deps)),
                '\t@echo "$(notdir $@)"',
                '\t$(idlprocess) $(basename $(notdir $@)) %s' % ' '.join(deps),
                '',
            ])

        # Create dependency for output header so we force regeneration if the
        # header was deleted. This ideally should not be necessary. However,
        # some processes (such as PGO at the time this was implemented) wipe
        # out dist/include without regard to our install manifests.

        out_path = os.path.join(self.environment.topobjdir, 'config',
            'makefiles', 'xpidl', 'Makefile')
        with self._write_file(out_path) as fh:
            self.environment.create_config_file(fh, extra=dict(
                xpidl_rules='\n'.join(rules),
                xpidl_modules=' '.join(xpt_modules),
            ))

        # The Makefile can't regenerate itself because of custom substitution.
        # We need to list it here to ensure changes cause regeneration.
        self.backend_input_files.add(os.path.join(self.environment.topsrcdir,
            'config', 'makefiles', 'xpidl', 'Makefile.in'))
Exemplo n.º 19
0
    def register_idl(self, idl, allow_existing=False):
        """Registers an IDL file with this instance.

        The IDL file will be built, installed, etc.
        """
        basename = mozpath.basename(idl.source_path)
        root = mozpath.splitext(basename)[0]
        xpt = '%s.xpt' % idl.module
        manifest = mozpath.join(idl.install_target, 'components', 'interfaces.manifest')
        chrome_manifest = mozpath.join(idl.install_target, 'chrome.manifest')

        # Duplicate registration is an error unless explicitly allowed.
        if not allow_existing and basename in self.idls:
            raise Exception('IDL already registered: %s' % basename)

        self.idls[basename] = {
            'source': idl.source_path,
            'module': idl.module,
            'basename': basename,
            'root': root,
            'manifest': manifest,
        }

        # Track this IDL's root under its module's (install_target, roots) pair.
        _, roots = self.modules.setdefault(idl.module, (idl.install_target, set()))
        roots.add(root)

        if idl.add_to_manifest:
            self.interface_manifests.setdefault(manifest, set()).add(xpt)
            self.chrome_manifests.add(chrome_manifest)
Exemplo n.º 20
0
def repackage_installer(topsrcdir, tag, setupexe, package, output):
    """Repackage an installer: bundle setup.exe and an optional .zip package
    into a self-extracting 7z archive written to ``output``.

    Args:
        topsrcdir: path to the source tree root (used to locate the SFX stub).
        tag: path to the 7z tag file describing the archive.
        setupexe: path to the setup.exe to include.
        package: optional path to a .zip package to unpack and include; may be
            falsy, in which case only setup.exe is archived.
        output: path the resulting installer executable is written to.

    Raises:
        Exception: if ``package`` is given but is not a valid zip file.
    """
    if package and not zipfile.is_zipfile(package):
        raise Exception("Package file %s is not a valid .zip file." % package)

    # We need the full path for the tag and output, since we chdir later.
    tag = mozpath.realpath(tag)
    output = mozpath.realpath(output)
    ensureParentDir(output)

    tmpdir = tempfile.mkdtemp()
    old_cwd = os.getcwd()
    try:
        if package:
            # Use a context manager so the zip handle is released even if
            # extraction fails partway through.
            with zipfile.ZipFile(package) as z:
                z.extractall(tmpdir)

        # Copy setup.exe into the root of the install dir, alongside the
        # package.
        shutil.copyfile(setupexe, mozpath.join(tmpdir, mozpath.basename(setupexe)))

        # archive_exe requires us to be in the directory where the package is
        # unpacked (the tmpdir)
        os.chdir(tmpdir)

        sfx_package = mozpath.join(topsrcdir, 'other-licenses/7zstub/firefox/7zSD.sfx')

        package_name = 'firefox' if package else None
        archive_exe(package_name, tag, sfx_package, output)

    finally:
        # Always restore the working directory and clean up the staging dir.
        os.chdir(old_cwd)
        shutil.rmtree(tmpdir)
Exemplo n.º 21
0
    def register_idl(self, idl, allow_existing=False):
        """Register an IDL file with this instance.

        The IDL file will be built, installed, etc.
        """
        basename = mozpath.basename(idl.source_path)

        # Fail fast on duplicates unless the caller explicitly allows
        # replacing an existing registration.
        if not allow_existing and basename in self.idls:
            raise Exception("IDL already registered: %s" % basename)

        manifest = mozpath.join(idl.install_target, "components", "interfaces.manifest")

        entry = {
            "source": idl.source_path,
            "module": idl.module,
            "basename": basename,
            "root": mozpath.splitext(basename)[0],
            "manifest": manifest,
        }
        self.idls[basename] = entry

        # Record this IDL's root under its module; the module record is a
        # (install_target, roots) tuple created on first registration.
        module_record = self.modules.setdefault(
            entry["module"], (idl.install_target, set()))
        module_record[1].add(entry["root"])

        if idl.add_to_manifest:
            self.interface_manifests.setdefault(manifest, set()).add(
                "%s.xpt" % idl.module)
            self.chrome_manifests.add(
                mozpath.join(idl.install_target, "chrome.manifest"))
Exemplo n.º 22
0
    def test_objdir_path(self):
        """'!'-prefixed paths resolve under topobjdir; others are rejected."""
        config = self.config
        ctxt = Context(config=config)
        ctxt.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build"))

        # (input value, expected path components under topobjdir)
        valid_cases = [
            ("!qux", ("foo", "qux")),
            ("!../bar/qux", ("bar", "qux")),
            ("!/qux/qux", ("qux", "qux")),
        ]
        for value, parts in valid_cases:
            path = ObjDirPath(ctxt, value)
            self.assertEqual(path, value)
            self.assertEqual(path.full_path,
                             mozpath.join(config.topobjdir, *parts))

        # Paths lacking the '!' prefix are not valid objdir paths.
        with self.assertRaises(ValueError):
            path = ObjDirPath(ctxt, "../bar/qux")

        with self.assertRaises(ValueError):
            path = ObjDirPath(ctxt, "/qux/qux")

        # Copy construction preserves both type and value ('path' is still
        # the last successfully created value, "!/qux/qux").
        path = ObjDirPath(path)
        self.assertIsInstance(path, ObjDirPath)
        self.assertEqual(path, "!/qux/qux")
        self.assertEqual(path.full_path,
                         mozpath.join(config.topobjdir, "qux", "qux"))

        path = Path(path)
        self.assertIsInstance(path, ObjDirPath)
Exemplo n.º 23
0
    def _process_reftest_manifest(self, sandbox, flavor, manifest_path):
        """Yield a TestManifest object for a reftest-style manifest.

        Reftest manifests don't come from manifestparser, but they are
        similar enough that the same emitted object type can be reused.
        No install entries are generated for reftests.
        """
        manifest_path = mozpath.normpath(manifest_path)
        full_path = mozpath.normpath(
            mozpath.join(sandbox["SRCDIR"], manifest_path))
        rel_dir = mozpath.dirname(
            mozpath.relpath(full_path, sandbox["TOPSRCDIR"]))

        reftest_manifest = reftest.ReftestManifest()
        reftest_manifest.load(full_path)

        obj = TestManifest(
            sandbox,
            full_path,
            reftest_manifest,
            flavor=flavor,
            install_prefix="%s/" % flavor,
            relpath=mozpath.join(rel_dir, mozpath.basename(manifest_path)),
        )

        # Emit one entry per test file, in deterministic (sorted) order.
        obj.tests.extend(
            {
                "path": test,
                "here": mozpath.dirname(test),
                "manifest": full_path,
                "name": mozpath.basename(test),
                "head": "",
                "tail": "",
                "support-files": "",
                "subsuite": "",
            }
            for test in sorted(reftest_manifest.files)
        )

        yield obj
Exemplo n.º 24
0
    def _process_files(self, obj, files, target, preprocessor=False,
                       marker='#', target_is_file=False, optional=False):
        """Register install (or preprocess) entries for a set of files.

        Args:
            obj: the moz.build object the files come from; may be None for
                optional entries. NOTE(review): must be non-None when
                preprocessor is True, since obj.srcdir/obj.config are read.
            files: iterable of source file paths.
            target: destination directory (or destination file when
                target_is_file is True).
            preprocessor: run files through the preprocessor instead of
                symlinking them.
            marker: preprocessor directive marker; the special value 'jar'
                selects '%' for .css files and '#' for everything else.
            target_is_file: ``target`` names the destination file itself.
            optional: register the destination as optional-exists rather
                than installing anything.
        """
        for f in files:
            if optional:
                full_dest = f
            elif target_is_file:
                full_dest = target
            else:
                full_dest = mozpath.join(target, mozpath.basename(f))
            install_manifest, dest = self._get_manifest_from_target(full_dest)
            source = None if obj is None else mozpath.normpath(
                mozpath.join(obj.srcdir, f))
            if preprocessor:
                dep_file = mozpath.join(self.dep_path, target,
                                        mozpath.basename(f) + '.pp')
                # Start from the defines recorded for this srcdir, then
                # layer on any -D flags from XULPPFLAGS.
                exist_defines = self._paths_to_defines.get(obj.srcdir, {})

                xul_defines = dict(exist_defines)
                for flag in self.XULPPFLAGS:
                    if flag.startswith('-D'):
                        define = flag[2:].split('=')
                        xul_defines[define[0]] = define[1] if len(define) >= 2 else ''
                defines = compute_defines(obj.config, defines=xul_defines)
                new_marker = marker
                if marker == 'jar':
                    # CSS uses '%' as its preprocessor marker inside jars.
                    new_marker = '%' if f.endswith('.css') else '#'
                install_manifest.add_preprocess(source, dest, dep_file,
                                                marker=new_marker,
                                                defines=defines)
            elif optional:
                install_manifest.add_optional_exists(dest)
            else:
                install_manifest.add_symlink(source, dest)
Exemplo n.º 25
0
    def generateXpcomCppHeader(self, config, filename, cache_dir):
        """Generate a C++ header in dist/include from an XPCOM IDL file.

        Args:
            config: build configuration providing topobjdir.
            filename: the .idl file name (e.g. 'nsIFoo.idl').
            cache_dir: parser cache directory handed to xpidl.IDLParser.

        Errors are reported but deliberately not re-raised so one bad IDL
        does not abort the whole generation pass.
        """
        prefixname = filename[:-4]  # strip the '.idl' extension
        targetFilePath = mozpath.join(config.topobjdir, 'dist/include',
                                      prefixname + ".h")
        if not self.targetNeedBuild(targetFilePath):
            return

        sourceFilePath = mozpath.join(config.topobjdir, 'dist/idl', filename)

        includePaths = [mozpath.join(config.topobjdir, 'dist/idl'),
            mozpath.join(self.libxul_sdk, 'idl')]
        import xpidl
        import header
        try:
            filename = mozpath.join('../../../dist/idl', filename)
            p = xpidl.IDLParser(outputdir=cache_dir)
            # Use context managers so file handles are closed even when
            # parsing or header generation raises.
            with open(sourceFilePath) as sourcefd:
                idl = p.parse(sourcefd.read(), filename=filename)
            idl.resolve(includePaths, p)
            with open(targetFilePath, 'w') as outfd:
                header.print_header(idl, outfd, filename)
            deps = set()
            self.updateIdlDeps(config, idl.deps, deps)
            self.addDependencies(targetFilePath, deps)
            self.addDependencies(targetFilePath, [targetFilePath])
            print('%s -> %s' % (sourceFilePath, targetFilePath))
        except Exception as e:
            print("Failed to generate IDL from %s to %s!" % (sourceFilePath, targetFilePath))
            print(e)
Exemplo n.º 26
0
    def test_objdir_path(self):
        """'!'-prefixed path values resolve under topobjdir."""
        config = self.config
        ctxt = Context(config=config)
        ctxt.push_source(mozpath.join(config.topsrcdir, 'foo', 'moz.build'))

        def check(value, *expected_parts):
            # Construct an ObjDirPath and verify both its string value and
            # its resolved full path; return it for reuse by the caller.
            p = ObjDirPath(ctxt, value)
            self.assertEqual(p, value)
            self.assertEqual(p.full_path,
                             mozpath.join(config.topobjdir, *expected_parts))
            return p

        check('!qux', 'foo', 'qux')
        check('!../bar/qux', 'bar', 'qux')
        path = check('!/qux/qux', 'qux', 'qux')

        # Values without the '!' prefix must be rejected.
        with self.assertRaises(ValueError):
            path = ObjDirPath(ctxt, '../bar/qux')

        with self.assertRaises(ValueError):
            path = ObjDirPath(ctxt, '/qux/qux')

        # Copy construction keeps both the type and the value.
        path = ObjDirPath(path)
        self.assertIsInstance(path, ObjDirPath)
        self.assertEqual(path, '!/qux/qux')
        self.assertEqual(path.full_path,
                         mozpath.join(config.topobjdir, 'qux', 'qux'))

        path = Path(path)
        self.assertIsInstance(path, ObjDirPath)
Exemplo n.º 27
0
    def test_binary_components(self):
        """Ensure binary components are correctly handled."""
        env = self._consume('binary-components', RecursiveMakeBackend)

        # Expected backend.mk content per subdirectory (after the two
        # boilerplate header lines). Only 'foo' registers manifest entries.
        expected = {
            'foo': [
                'misc::\n',
                '\t$(call py_action,buildlist,$(DEPTH)/dist/bin/chrome.manifest '
                + "'manifest components/components.manifest')\n",
                '\t$(call py_action,buildlist,'
                + '$(DEPTH)/dist/bin/components/components.manifest '
                + "'binary-component foo')\n",
                'LIBRARY_NAME := foo\n',
                'FORCE_SHARED_LIB := 1\n',
                'IMPORT_LIBRARY := foo\n',
                'SHARED_LIBRARY := foo\n',
                'IS_COMPONENT := 1\n',
                'DSO_SONAME := foo\n',
            ],
            'bar': [
                'LIBRARY_NAME := bar\n',
                'FORCE_SHARED_LIB := 1\n',
                'IMPORT_LIBRARY := bar\n',
                'SHARED_LIBRARY := bar\n',
                'IS_COMPONENT := 1\n',
                'DSO_SONAME := bar\n',
            ],
        }

        for subdir in ('foo', 'bar'):
            with open(mozpath.join(env.topobjdir, subdir, 'backend.mk')) as fh:
                lines = fh.readlines()[2:]
            self.assertEqual(lines, expected[subdir])
Exemplo n.º 28
0
    def normalize_path(self, path, filesystem_absolute=False, srcdir=None):
        """Normalizes paths.

        If the path is absolute, behavior is governed by filesystem_absolute.
        If filesystem_absolute is True, the path is interpreted as absolute on
        the actual filesystem. If it is false, the path is treated as absolute
        within the current topsrcdir.

        If the path is not absolute, it will be treated as relative to the
        currently executing file. If there is no currently executing file, it
        will be treated as relative to topsrcdir.
        """
        if not os.path.isabs(path):
            # Relative paths resolve against, in priority order: the given
            # srcdir, the directory of the currently executing file, and
            # finally topsrcdir.
            if srcdir:
                base = srcdir
            elif self._execution_stack:
                base = mozpath.dirname(self._execution_stack[-1])
            else:
                base = self.topsrcdir
            return mozpath.normpath(mozpath.join(base, path))

        if filesystem_absolute:
            return path

        # The path is "absolute" within a source tree. Probe each source
        # root for an existing file. mozpath.join would ignore the root
        # argument if handed an absolute path, so drop the leading '/'.
        relative = path[1:]
        for root in [self.topsrcdir] + self.external_source_dirs:
            candidate = mozpath.normpath(mozpath.join(root, relative))
            if os.path.exists(candidate):
                return candidate

        # Nothing on disk matched; fall back to resolving under topsrcdir.
        return mozpath.normpath(mozpath.join(self.topsrcdir, relative))
Exemplo n.º 29
0
    def generateXpcomXpt(self, config, targetPath, files, cache_dir):
        """Link the given IDL files into a single .xpt typelib at targetPath.

        Args:
            config: build configuration providing topobjdir.
            targetPath: output path for the linked typelib.
            files: iterable of .idl source file paths.
            cache_dir: parser cache directory handed to xpidl.IDLParser.
        """
        if not self.targetNeedBuild(targetPath):
            return
        xpts = []
        includePaths = [mozpath.join(config.topobjdir, 'dist/idl'),
                        mozpath.join(self.libxul_sdk, 'idl')]

        import xpidl
        import xpt
        import typelib
        deps = set()
        p = xpidl.IDLParser(outputdir=cache_dir)
        for f in files:
            # Close the source file promptly instead of leaking the handle.
            with open(f) as idl_file:
                idl_data = idl_file.read()
            filename = mozpath.join('../../../dist/idl', os.path.basename(f))

            idl = p.parse(idl_data, filename=filename)
            idl.resolve(includePaths, p)
            # Write each typelib to an in-memory buffer; xpt_link consumes
            # the buffers, rewound to the start.
            xptIo = io.BytesIO()
            typelib.write_typelib(idl, xptIo, filename=filename)
            xptIo.seek(0)
            xpts.append(xptIo)

            self.updateIdlDeps(config, idl.deps, deps)

        print("Generating %s" % targetPath)
        xpt.xpt_link(xpts).write(targetPath)
        self.addDependencies(targetPath, deps)
        self.addDependencies(targetPath, [targetPath])
Exemplo n.º 30
0
def get_conf(conf_file):
    """Parse *conf_file* and return (Configuration, include directories)."""
    conf = Configuration(conf_file)
    include_dirs = [mozpath.join(buildconfig.topsrcdir, *parts)
                    for parts in (('accessible', 'interfaces'),
                                  ('xpcom', 'base'))]
    return conf, include_dirs
Exemplo n.º 31
0
    def convert_support_files(self, test, install_root, manifest_dir, out_dir):
        """Compute install entries for a test's support files.

        Args:
            test: the test object to process.
            install_root: the directory under $objdir/_tests that will
                contain the tests for this harness (examples are
                "testing/mochitest", "xpcshell").
            manifest_dir: absolute path to the (srcdir) directory containing
                the manifest that included this test.
            out_dir: the path relative to $objdir/_tests used as the
                destination for the test, based on the relative path to the
                manifest in the srcdir, the install_root, and
                'install-to-subdir', if present in the manifest.

        Returns:
            A TestInstallInfo collecting plain installs, glob-pattern
            installs, externally-generated installs, and deferred
            (inter-directory) installs.
        """
        info = TestInstallInfo()
        for thing, seen in self._fields:
            value = test.get(thing, '')
            # We need to memoize on the basis of both the path and the output
            # directory for the benefit of tests specifying 'install-to-subdir'.
            if (value, out_dir) in seen:
                continue
            seen.add((value, out_dir))
            # Each field value is a whitespace-separated list of patterns.
            for pattern in value.split():
                if thing == 'generated-files':
                    # Generated files are produced by the build, not copied
                    # from the srcdir; record them as external installs.
                    info.external_installs.add(
                        mozpath.normpath(mozpath.join(out_dir, pattern)))
                # '!' indicates our syntax for inter-directory support file
                # dependencies. These receive special handling in the backend.
                elif pattern[0] == '!':
                    info.deferred_installs.add(pattern)
                # We only support globbing on support-files because
                # the harness doesn't support * for head and tail.
                elif '*' in pattern and thing == 'support-files':
                    info.pattern_installs.append(
                        (manifest_dir, pattern, out_dir))
                # "absolute" paths identify files that are to be
                # placed in the install_root directory (no globs)
                elif pattern[0] == '/':
                    full = mozpath.normpath(
                        mozpath.join(manifest_dir, mozpath.basename(pattern)))
                    info.installs.append(
                        (full, mozpath.join(install_root, pattern[1:])))
                else:
                    # Ordinary relative path: source resolves against the
                    # manifest's directory, destination against out_dir.
                    full = mozpath.normpath(mozpath.join(
                        manifest_dir, pattern))
                    dest_path = mozpath.join(out_dir, pattern)

                    # If the path resolves to a different directory
                    # tree, we take special behavior depending on the
                    # entry type.
                    if not full.startswith(manifest_dir):
                        # If it's a support file, we install the file
                        # into the current destination directory.
                        # This implementation makes installing things
                        # with custom prefixes impossible. If this is
                        # needed, we can add support for that via a
                        # special syntax later.
                        if thing == 'support-files':
                            dest_path = mozpath.join(out_dir,
                                                     os.path.basename(pattern))
                        # If it's not a support file, we ignore it.
                        # This preserves old behavior so things like
                        # head files doesn't get installed multiple
                        # times.
                        else:
                            continue
                    info.installs.append((full, mozpath.normpath(dest_path)))
        return info
Exemplo n.º 32
0
from mozbuild.configure.options import (
    InvalidOptionError,
    NegativeOptionValue,
    PositiveOptionValue,
)
from mozbuild.configure import (
    ConfigureError,
    ConfigureSandbox,
)
from mozbuild.util import exec_

import mozpack.path as mozpath

test_data_path = mozpath.abspath(mozpath.dirname(__file__))
test_data_path = mozpath.join(test_data_path, 'data')


class TestConfigure(unittest.TestCase):
    def get_config(self,
                   options=[],
                   env={},
                   configure='moz.configure',
                   prog='/bin/configure'):
        config = {}
        out = StringIO()
        sandbox = ConfigureSandbox(config, env, [prog] + options, out, out)

        sandbox.run(mozpath.join(test_data_path, configure))

        if '--help' not in options:
Exemplo n.º 33
0
    def add_wpt_manifest_data(self):
        """Adds manifest data for web-platform-tests into the list of available tests.

        Upon invocation, this method will download from firefox-ci the most recent
        version of the web-platform-tests manifests.

        Once manifest is downloaded, this method will add details about each test
        into the list of available tests.
        """
        if self._wpt_loaded:
            return

        self._reset_state()

        wpt_path = os.path.join(self.topsrcdir, "testing", "web-platform")
        sys.path = [wpt_path] + sys.path

        # Imported late: manifestupdate lives under wpt_path, which was only
        # just prepended to sys.path.
        import manifestupdate
        import logging
        logger = logging.getLogger("manifestupdate")
        logger.disabled = True

        manifests = manifestupdate.run(self.topsrcdir,
                                       self.topobjdir,
                                       rebuild=False,
                                       download=True,
                                       config_path=None,
                                       rewrite_config=True,
                                       update=True,
                                       logger=logger)
        if not manifests:
            print("Loading wpt manifest failed")
            return

        for manifest, data in six.iteritems(manifests):
            # Full path on disk of the web-platform tests directory.
            tests_root = data["tests_path"]

            for test_type, path, tests in manifest:
                if test_type not in [
                        "testharness", "reftest", "wdspec", "crashtest"
                ]:
                    continue

                # Compute these once per (type, path) group; the original
                # code computed them twice (before and after the filter).
                full_path = mozpath.join(tests_root, path)  # absolute path on disk
                src_path = mozpath.relpath(full_path, self.topsrcdir)

                for test in tests:
                    testobj = {
                        "head": "",
                        "support-files": "",
                        "path": full_path,
                        "flavor": "web-platform-tests",
                        "subsuite": test_type,
                        "here": mozpath.dirname(path),
                        "name": test.id,
                        "file_relpath": src_path,
                        "srcdir_relpath": src_path,
                        "dir_relpath": mozpath.dirname(src_path),
                    }
                    group = self.get_wpt_group(testobj)
                    testobj["manifest"] = group

                    # Mozilla-specific tests live under mozilla/tests and
                    # drop the /_mozilla prefix from their group name.
                    test_root = "tests"
                    if group.startswith("/_mozilla"):
                        test_root = os.path.join("mozilla", "tests")
                        group = group[len("/_mozilla"):]

                    group = group.lstrip("/")
                    testobj["manifest_relpath"] = os.path.join(
                        wpt_path, test_root, group)
                    self._tests.append(testobj)

        self._wpt_loaded = True
Exemplo n.º 34
0
def _repack(app_finder, l10n_finder, copier, formatter, non_chrome=set()):
    """Core of l10n repacking: replace the app package's locale files with
    the langpack's, writing the resulting package through ``formatter``.

    Args:
        app_finder: finder over the application package being repacked.
        l10n_finder: finder over the langpack providing the new locale.
        copier: file registry backing the formatter; also used at the end to
            transplant jar preload information.
        formatter: package formatter that receives all output files.
        non_chrome: patterns (relative to each chrome base) of localized
            files that are not covered by chrome manifest entries.
            NOTE(review): mutable default argument; harmless here because it
            is only iterated, never mutated.
    """
    app = LocaleManifestFinder(app_finder)
    l10n = LocaleManifestFinder(l10n_finder)

    # The code further below assumes there's only one locale replaced with
    # another one.
    if len(app.locales) > 1:
        errors.fatal("Multiple app locales aren't supported: " +
                     ",".join(app.locales))
    if len(l10n.locales) > 1:
        errors.fatal("Multiple l10n locales aren't supported: " +
                     ",".join(l10n.locales))
    locale = app.locales[0]
    l10n_locale = l10n.locales[0]

    # For each base directory, store what path a locale chrome package name
    # corresponds to.
    # e.g., for the following entry under app/chrome:
    #     locale foo en-US path/to/files
    # keep track that the locale path for foo in app is
    # app/chrome/path/to/files.
    l10n_paths = {}
    for e in l10n.entries:
        if isinstance(e, ManifestChrome):
            base = mozpath.basedir(e.path, app.bases)
            l10n_paths.setdefault(base, {})
            l10n_paths[base][e.name] = e.path

    # For chrome and non chrome files or directories, store what langpack path
    # corresponds to a package path.
    paths = {}
    for e in app.entries:
        if isinstance(e, ManifestEntryWithRelPath):
            base = mozpath.basedir(e.path, app.bases)
            if base not in l10n_paths:
                errors.fatal("Locale doesn't contain %s/" % base)
                # Allow errors to accumulate
                continue
            if e.name not in l10n_paths[base]:
                errors.fatal("Locale doesn't have a manifest entry for '%s'" %
                             e.name)
                # Allow errors to accumulate
                continue
            paths[e.path] = l10n_paths[base][e.name]

    # Map non-chrome localized files one-to-one. Files present only in the
    # app (not the langpack) map to None so they are dropped below.
    for pattern in non_chrome:
        for base in app.bases:
            path = mozpath.join(base, pattern)
            left = set(p for p, f in app_finder.find(path))
            right = set(p for p, f in l10n_finder.find(path))
            for p in right:
                paths[p] = p
            for p in left - right:
                paths[p] = None

    # Create a new package, with non localized bits coming from the original
    # package, and localized bits coming from the langpack.
    packager = SimplePackager(formatter)
    for p, f in app_finder:
        if is_manifest(p):
            # Remove localized manifest entries.
            for e in [e for e in f if e.localized]:
                f.remove(e)
        # If the path is one that needs a locale replacement, use the
        # corresponding file from the langpack.
        path = None
        if p in paths:
            path = paths[p]
            if not path:
                # Mapped to None above: app-only localized file; drop it.
                continue
        else:
            # p may live under one of the localized directories; remap the
            # subpath into the corresponding langpack directory.
            base = mozpath.basedir(p, paths.keys())
            if base:
                subpath = mozpath.relpath(p, base)
                path = mozpath.normpath(mozpath.join(paths[base], subpath))
        if path:
            files = [f for p, f in l10n_finder.find(path)]
            if not len(files):
                # NOTE(review): 'base' can be stale here (left over from a
                # previous iteration) when p matched 'paths' directly --
                # confirm this is the intended guard.
                if base not in non_chrome:
                    finderBase = ""
                    if hasattr(l10n_finder, 'base'):
                        finderBase = l10n_finder.base
                    errors.error("Missing file: %s" %
                                 os.path.join(finderBase, path))
            else:
                packager.add(path, files[0])
        else:
            packager.add(p, f)

    # Add localized manifest entries from the langpack.
    l10n_manifests = []
    for base in set(e.base for e in l10n.entries):
        m = ManifestFile(base, [e for e in l10n.entries if e.base == base])
        path = mozpath.join(base, 'chrome.%s.manifest' % l10n_locale)
        l10n_manifests.append((path, m))
    bases = packager.get_bases()
    for path, m in l10n_manifests:
        base = mozpath.basedir(path, bases)
        packager.add(path, m)
        # Add a "manifest $path" entry in the top manifest under that base.
        m = ManifestFile(base)
        m.add(Manifest(base, mozpath.relpath(path, base)))
        packager.add(mozpath.join(base, 'chrome.manifest'), m)

    packager.close()

    # Add any remaining non chrome files.
    for pattern in non_chrome:
        for base in bases:
            for p, f in l10n_finder.find(mozpath.join(base, pattern)):
                if not formatter.contains(p):
                    formatter.add(p, f)

    # Transplant jar preloading information, rewriting the locale code in
    # each logged path.
    # NOTE(review): dict.iteritems is Python 2 only; other examples in this
    # file use six.iteritems or .items() -- confirm target interpreter.
    for path, log in app_finder.jarlogs.iteritems():
        assert isinstance(copier[path], Jarrer)
        copier[path].preload([l.replace(locale, l10n_locale) for l in log])
Exemplo n.º 35
0
def find_files(archive):
    """Yield (path, file) pairs for everything belonging to *archive*.

    Combines the static ARCHIVE_FILES entry list for the archive with
    dynamically computed entries covering generated harness files, then
    walks the matching trees with FileFinder.
    """
    extra_entries = []
    generated_harness_files = find_generated_harness_files()

    if archive == 'common':
        # Construct entries ensuring all our generated harness files are
        # packaged in the common tests archive.
        packaged_paths = set()
        for entry in OBJDIR_TEST_FILES.values():
            pat = mozpath.join(entry['base'], entry['pattern'])
            # NOTE(review): this deletes 'pattern' from the shared
            # OBJDIR_TEST_FILES entry dict in place, so calling
            # find_files('common') twice in one process would misbehave --
            # confirm callers only invoke it once.
            del entry['pattern']
            patterns = []
            for path in generated_harness_files:
                if mozpath.match(path, pat):
                    # Strip the base prefix (and its separator) so the
                    # pattern is relative to entry['base'].
                    patterns.append(path[len(entry['base']) + 1:])
                    packaged_paths.add(path)
            if patterns:
                entry['patterns'] = patterns
                extra_entries.append(entry)
        # Any generated harness file not claimed above is packaged from the
        # objdir's _tests directory.
        entry = {
            'source': buildconfig.topobjdir,
            'base': '_tests',
            'patterns': [],
        }
        for path in set(generated_harness_files) - packaged_paths:
            entry['patterns'].append(path[len('_tests') + 1:])
        extra_entries.append(entry)

    for entry in ARCHIVE_FILES[archive] + extra_entries:
        source = entry['source']
        dest = entry.get('dest')
        base = entry.get('base', '')

        # Normalize singular 'pattern'/'manifest' keys into the plural
        # lists so the rest of the loop handles one shape.
        pattern = entry.get('pattern')
        patterns = entry.get('patterns', [])
        if pattern:
            patterns.append(pattern)

        manifest = entry.get('manifest')
        manifests = entry.get('manifests', [])
        if manifest:
            manifests.append(manifest)
        if manifests:
            # Package the whole directory of every referenced manifest.
            dirs = find_manifest_dirs(buildconfig.topsrcdir, manifests)
            patterns.extend({'{}/**'.format(d) for d in dirs})

        # Always exclude lint config, marker files and bytecode caches.
        ignore = list(entry.get('ignore', []))
        ignore.extend([
            '**/.flake8',
            '**/.mkdir.done',
            '**/*.pyc',
        ])

        if archive not in ('common', 'updater-dep') and base.startswith('_tests'):
            # We may have generated_harness_files to exclude from this entry.
            for path in generated_harness_files:
                if path.startswith(base):
                    ignore.append(path[len(base) + 1:])

        common_kwargs = {
            'find_dotfiles': True,
            'ignore': ignore,
        }

        finder = FileFinder(os.path.join(source, base), **common_kwargs)

        for pattern in patterns:
            for p, f in finder.find(pattern):
                # Re-root under 'dest' when the entry asks for it.
                if dest:
                    p = mozpath.join(dest, p)
                yield p, f
Exemplo n.º 36
0
import time

from manifestparser import TestManifest
from reftest import ReftestManifest

from mozbuild.util import ensureParentDir
from mozpack.archive import create_tar_gz_from_files
from mozpack.copier import FileRegistry
from mozpack.files import ExistingFile, FileFinder
from mozpack.manifests import InstallManifest
from mozpack.mozjar import JarWriter
import mozpack.path as mozpath

import buildconfig

STAGE = mozpath.join(buildconfig.topobjdir, 'dist', 'test-stage')

TEST_HARNESS_BINS = [
    'BadCertServer',
    'GenerateOCSPResponse',
    'OCSPStaplingServer',
    'SymantecSanctionsServer',
    'SmokeDMD',
    'certutil',
    'crashinject',
    'fileid',
    'geckodriver',
    'minidumpwriter',
    'pk12util',
    'screenshot',
    'screentopng',
Exemplo n.º 37
0
    def read_mozconfig(self, path=None):
        """Read the contents of a mozconfig into a data structure.

        This takes the path to a mozconfig to load. If the given path is
        AUTODETECT, will try to find a mozconfig from the environment using
        find_mozconfig().

        mozconfig files are shell scripts. So, we can't just parse them.
        Instead, we run the shell script in a wrapper which allows us to record
        state from execution. Thus, the output from a mozconfig is a friendly
        static data structure.

        Returns a dict with the keys "path", "topobjdir", "configure_args",
        "make_flags", "make_extra", "env" and "vars".

        Raises MozconfigLoadException if executing the mozconfig fails or if
        its captured output cannot be parsed.
        """
        if path is self.AUTODETECT:
            path = find_mozconfig(self.topsrcdir)

        result = {
            "path": path,
            "topobjdir": None,
            "configure_args": None,
            "make_flags": None,
            "make_extra": None,
            "env": None,
            "vars": None,
        }

        # No mozconfig at all: the only remaining influence on the result is
        # MOZ_OBJDIR from the ambient environment.
        if path is None:
            if "MOZ_OBJDIR" in os.environ:
                result["topobjdir"] = os.environ["MOZ_OBJDIR"]
            return result

        # Normalize path separators before handing the path to the shell.
        path = mozpath.normsep(path)

        result["configure_args"] = []
        result["make_extra"] = []
        result["make_flags"] = []

        # Since mozconfig_loader is a shell script, running it "normally"
        # actually leads to two shell executions on Windows. Avoid this by
        # directly calling sh mozconfig_loader.
        shell = "sh"
        if "MOZILLABUILD" in os.environ:
            shell = os.environ["MOZILLABUILD"] + "/msys/bin/sh"
        if sys.platform == "win32":
            shell = shell + ".exe"

        command = [
            shell,
            mozpath.normsep(self._loader_script),
            mozpath.normsep(self.topsrcdir),
            path,
            sys.executable,
            mozpath.join(mozpath.dirname(self._loader_script), "action",
                         "dump_env.py"),
        ]

        try:
            env = dict(os.environ)
            env["PYTHONIOENCODING"] = "utf-8"
            # We need to capture stderr because that's where the shell sends
            # errors if execution fails.
            output = six.ensure_text(
                subprocess.check_output(
                    command,
                    stderr=subprocess.STDOUT,
                    cwd=self.topsrcdir,
                    env=env,
                    universal_newlines=True,
                ))
        except subprocess.CalledProcessError as e:
            lines = e.output.splitlines()

            # Output before actual execution shouldn't be relevant.
            try:
                index = lines.index("------END_BEFORE_SOURCE")
                lines = lines[index + 1:]
            except ValueError:
                pass

            raise MozconfigLoadException(path, MOZCONFIG_BAD_EXIT_CODE, lines)

        try:
            parsed = self._parse_loader_output(output)
        except AssertionError:
            # _parse_loader_output uses assertions to verify the
            # well-formedness of the shell output; when these fail, it
            # generally means there was a problem with the output, but we
            # include the assertion traceback just to be sure.
            print("Assertion failed in _parse_loader_output:")
            traceback.print_exc()
            raise MozconfigLoadException(path, MOZCONFIG_BAD_OUTPUT,
                                         output.splitlines())

        # Bucket the variables seen before/after sourcing the mozconfig into
        # added/removed/modified/unmodified, ignoring shell bookkeeping vars.
        def diff_vars(vars_before, vars_after):
            set1 = set(vars_before.keys()) - self.IGNORE_SHELL_VARIABLES
            set2 = set(vars_after.keys()) - self.IGNORE_SHELL_VARIABLES
            added = set2 - set1
            removed = set1 - set2
            maybe_modified = set1 & set2
            changed = {
                "added": {},
                "removed": {},
                "modified": {},
                "unmodified": {}
            }

            for key in added:
                changed["added"][key] = vars_after[key]

            for key in removed:
                changed["removed"][key] = vars_before[key]

            for key in maybe_modified:
                if vars_before[key] != vars_after[key]:
                    changed["modified"][key] = (vars_before[key],
                                                vars_after[key])
                elif key in self.ENVIRONMENT_VARIABLES:
                    # In order for irrelevant environment variable changes not
                    # to incur in re-running configure, only a set of
                    # environment variables are stored when they are
                    # unmodified. Otherwise, changes such as using a different
                    # terminal window, or even rebooting, would trigger
                    # reconfigures.
                    changed["unmodified"][key] = vars_after[key]

            return changed

        result["env"] = diff_vars(parsed["env_before"], parsed["env_after"])

        # Environment variables also appear as shell variables, but that's
        # uninteresting duplication of information. Filter them out.
        def filt(x, y):
            return {k: v for k, v in x.items() if k not in y}

        result["vars"] = diff_vars(
            filt(parsed["vars_before"], parsed["env_before"]),
            filt(parsed["vars_after"], parsed["env_after"]),
        )

        result["configure_args"] = [self._expand(o) for o in parsed["ac"]]

        if "MOZ_OBJDIR" in parsed["env_before"]:
            result["topobjdir"] = parsed["env_before"]["MOZ_OBJDIR"]

        mk = [self._expand(o) for o in parsed["mk"]]

        # Split "mk_add_options" style entries into recognized make variables
        # (MOZ_MAKE_FLAGS, MOZ_OBJDIR) and everything else (make_extra).
        for o in mk:
            match = self.RE_MAKE_VARIABLE.match(o)

            if match is None:
                result["make_extra"].append(o)
                continue

            name, value = match.group("var"), match.group("value")

            if name == "MOZ_MAKE_FLAGS":
                result["make_flags"] = value.split()
                continue

            if name == "MOZ_OBJDIR":
                result["topobjdir"] = value
                if parsed["env_before"].get("MOZ_PROFILE_GENERATE") == "1":
                    # If MOZ_OBJDIR is specified in the mozconfig, we need to
                    # make sure that the '/instrumented' directory gets appended
                    # for the first build to avoid an objdir mismatch when
                    # running 'mach package' on Windows.
                    result["topobjdir"] = mozpath.join(result["topobjdir"],
                                                       "instrumented")
                continue

            result["make_extra"].append(o)

        return result
Exemplo n.º 38
0
        'manifest chrome/chrome.manifest',
        'manifest components/components.manifest',
    ],
    'app/chrome/chrome.manifest': [
        'content content foo/',
    ],
    'app/chrome/foo/foo': FILES['app/chrome/foo/foo'],
    'app/components/components.manifest': [
        'component {foo-id} foo.js',
    ],
    'app/components/foo.js': FILES['app/components/foo.js'],
}

for addon in ('addon0', 'addon1'):
    RESULT_FLAT.update({
        mozpath.join(addon, p): f
        for p, f in {
            'chrome.manifest': [
                'manifest chrome/chrome.manifest',
                'manifest components/components.manifest',
            ],
            'chrome/chrome.manifest': [
                'content content foo/bar/',
            ],
            'chrome/foo/bar/baz': FILES[mozpath.join(addon, 'chrome/foo/bar/baz')],
            'components/components.manifest': [
                'interfaces bar.xpt',
                'interfaces foo.xpt',
            ],
            'components/bar.xpt': bar_xpt,
            'components/foo.xpt': foo2_xpt,
Exemplo n.º 39
0
def read_reftest_manifest(context, manifest_path):
    """Load and return the reftest manifest at *manifest_path*.

    The path is resolved relative to the context's source directory and
    normalized before loading.
    """
    import reftest
    full_path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
    loaded = reftest.ReftestManifest(finder=context._finder)
    loaded.load(full_path)
    return loaded
Exemplo n.º 40
0
    def setup_vscode(self):
        """Configure VS Code's clangd integration for this checkout.

        Locates (or downloads) a clangd binary, builds the desired
        ``clangd.path``/``clangd.arguments`` settings, merges them into
        ``.vscode/settings.json`` after prompting the user, and finally
        launches VS Code on the source directory.

        Returns 0 on success, or a non-zero return/exit code on failure.
        """
        vscode_settings = mozpath.join(self.topsrcdir, ".vscode",
                                       "settings.json")

        clangd_cc_path = mozpath.join(self.topobjdir, "clangd")

        # Verify if the required files are present
        clang_tools_path = mozpath.join(self._mach_context.state_dir,
                                        "clang-tools")
        clang_tidy_bin = mozpath.join(clang_tools_path, "clang-tidy", "bin")

        clangd_path = mozpath.join(
            clang_tidy_bin,
            "clangd" + self.config_environment.substs.get("BIN_SUFFIX", ""),
        )

        if not os.path.exists(clangd_path):
            self.log(logging.ERROR, "ide", {},
                     "Unable to locate clangd in {}.".format(clang_tidy_bin))
            rc = self._get_clang_tools(clang_tools_path)

            if rc != 0:
                return rc

        import multiprocessing
        import json
        from mozbuild.code_analysis.utils import ClangTidyConfig

        clang_tidy_cfg = ClangTidyConfig(self.topsrcdir)

        # Build the settings as a plain dict instead of %-interpolating the
        # values into a JSON string and re-parsing it: string interpolation
        # produces invalid JSON as soon as a value needs escaping (e.g.
        # backslashes in Windows paths), making json.loads raise.
        clangd_json = {
            "clangd.path": clangd_path,
            "clangd.arguments": [
                "--compile-commands-dir",
                clangd_cc_path,
                "-j",
                # The original settings carried this as a string; keep that.
                str(multiprocessing.cpu_count()),
                "--limit-results",
                "0",
                "--completion-style",
                "detailed",
                "--background-index",
                "--all-scopes-completion",
                "--log",
                "error",
                "--pch-storage",
                "memory",
                "--clang-tidy",
                "--clang-tidy-checks",
                str(clang_tidy_cfg.checks),
            ],
        }

        # Create an empty settings dictionary
        settings = {}

        # Modify the .vscode/settings.json configuration file
        if os.path.exists(vscode_settings):
            # If exists prompt for a configuration change
            choice = prompt_bool(
                "Configuration for {settings} must change. "
                "Do you want to proceed?".format(settings=vscode_settings))
            if not choice:
                return 1

            # Read the original vscode settings
            with open(vscode_settings) as fh:
                try:
                    settings = json.load(fh)
                    print(
                        "The following modifications will occur:\nOriginal:\n{orig}\n"
                        "New:\n{new}".format(
                            orig=json.dumps(
                                {
                                    key:
                                    settings[key] if key in settings else ""
                                    for key in
                                    ["clangd.path", "clangd.arguments"]
                                },
                                indent=4,
                            ),
                            new=json.dumps(clangd_json, indent=4),
                        ))

                except ValueError:
                    # Decoding has failed, work with an empty dict
                    settings = {}

        # Write our own Configuration
        settings["clangd.path"] = clangd_json["clangd.path"]
        settings["clangd.arguments"] = clangd_json["clangd.arguments"]

        with open(vscode_settings, "w") as fh:
            fh.write(json.dumps(settings, indent=4))

        # Open vscode with new configuration
        rc = subprocess.call([self.vscode_path, self.topsrcdir])

        if rc != 0:
            self.log(
                logging.ERROR,
                "ide",
                {},
                "Unable to open VS Code. Please open VS Code manually and load "
                "directory: {}".format(self.topsrcdir),
            )
            return rc

        return 0
Exemplo n.º 41
0
def prepare(srcdir, objdir, shell, args):
    """Collect configure arguments/environment and pickle them into objdir.

    Parses the recognized configure options out of *args*, optionally reads
    overriding args/env from stdin (used on Windows, where msys mangles
    them), and writes the resulting data dict to CONFIGURE_DATA inside
    *objdir* for a later run() to consume.
    """
    parser = argparse.ArgumentParser()
    # The --srcdir argument is simply ignored. It's a useless autoconf feature
    # that we don't support well anyways. This makes it stripped from `others`
    # and allows to skip setting it when calling the subconfigure (configure
    # will take it from the configure path anyways).
    for option in ('--target', '--host', '--build', '--cache-file',
                   '--srcdir'):
        parser.add_argument(option, type=str)

    data_file = os.path.join(objdir, CONFIGURE_DATA)
    previous_args = None
    if os.path.exists(data_file):
        with open(data_file, 'rb') as f:
            previous_args = pickle.load(f)['args']

    # Msys likes to break environment variables and command line arguments,
    # so read those from stdin, as they are passed from the configure script
    # when necessary (on windows).
    # However, for some reason, $PATH is not handled like other environment
    # variables, and msys remangles it even when giving it is already a msys
    # $PATH. Fortunately, the mangling/demangling is just find for $PATH, so
    # we can just take the value from the environment. Msys will convert it
    # back properly when calling subconfigure.
    stdin_payload = sys.stdin.read()
    if stdin_payload:
        # SECURITY NOTE: eval() of stdin content; acceptable only because the
        # input comes from our own configure wrapper, never untrusted data.
        payload = {key: value for [key, value] in eval(stdin_payload)}
        environ = dict(payload['env'])
        environ['PATH'] = os.environ['PATH']
        args = payload['args']
    else:
        environ = os.environ

    known, others = parser.parse_known_args(args)

    data = {
        'target': known.target,
        'host': known.host,
        'build': known.build,
        'args': others,
        'shell': shell,
        'srcdir': srcdir,
        'env': environ,
    }

    # Resolve the cache file location, defaulting next to the objdir.
    if known.cache_file:
        data['cache-file'] = mozpath.normpath(
            mozpath.join(os.getcwd(), known.cache_file))
    else:
        data['cache-file'] = mozpath.join(objdir, 'config.cache')

    if previous_args is not None:
        data['previous-args'] = previous_args

    try:
        os.makedirs(objdir)
    except OSError as e:
        # The objdir already existing is fine; anything else is fatal.
        if e.errno != errno.EEXIST:
            raise

    with open(data_file, 'wb') as f:
        pickle.dump(data, f)
Exemplo n.º 42
0
def run(objdir):
    """Run configure and/or config.status in *objdir*, if needed.

    Loads the data pickled by prepare() from CONFIGURE_DATA, decides from
    timestamps, argument changes and cache validity whether configure and
    config.status can be skipped, and executes whatever is necessary.

    Returns a tuple (objdir relative to cwd, exit status, captured output).
    """
    ret = 0
    output = ''

    with open(os.path.join(objdir, CONFIGURE_DATA), 'rb') as f:
        data = pickle.load(f)

    data['objdir'] = objdir

    cache_file = data['cache-file']
    cleared_cache = True
    if os.path.exists(cache_file):
        cleared_cache = maybe_clear_cache(data)

    config_files, command_files = get_config_files(data)
    contents = []
    for f, t in config_files:
        contents.append(File(f))

    # AC_CONFIG_COMMANDS actually only registers tags, not file names
    # but most commands are tagged with the file name they create.
    # However, a few don't, or are tagged with a directory name (and their
    # command is just to create that directory)
    for f in command_files:
        if os.path.isfile(f):
            contents.append(File(f))

    # Only run configure if one of the following is true:
    # - config.status doesn't exist
    # - config.status is older than configure
    # - the configure arguments changed
    # - the environment changed in a way that requires a cache clear.
    configure = mozpath.join(data['srcdir'], 'configure')
    config_status_path = mozpath.join(objdir, 'config.status')
    skip_configure = True
    if not os.path.exists(config_status_path):
        skip_configure = False
        config_status = None
    else:
        config_status = File(config_status_path)
        if config_status.mtime < os.path.getmtime(configure) or \
                data.get('previous-args', data['args']) != data['args'] or \
                cleared_cache:
            skip_configure = False

    relobjdir = os.path.relpath(objdir, os.getcwd())

    if not skip_configure:
        command = [data['shell'], configure]
        for kind in ('target', 'build', 'host'):
            if data.get(kind) is not None:
                command += ['--%s=%s' % (kind, data[kind])]
        command += data['args']
        command += ['--cache-file=%s' % cache_file]

        # Pass --no-create to configure so that it doesn't run config.status.
        # We're going to run it ourselves.
        command += ['--no-create']

        print prefix_lines('configuring', relobjdir)
        print prefix_lines('running %s' % ' '.join(command[:-1]), relobjdir)
        sys.stdout.flush()
        try:
            output += subprocess.check_output(command,
                                              stderr=subprocess.STDOUT,
                                              cwd=objdir,
                                              env=data['env'])
        except subprocess.CalledProcessError as e:
            return relobjdir, e.returncode, e.output

        # Leave config.status with a new timestamp if configure is newer than
        # its original mtime.
        if config_status and os.path.getmtime(
                configure) <= config_status.mtime:
            config_status.update_time()

    # Only run config.status if one of the following is true:
    # - config.status changed or did not exist
    # - one of the templates for config files is newer than the corresponding
    #   config file.
    skip_config_status = True
    if not config_status or config_status.modified:
        # If config.status doesn't exist after configure (because it's not
        # an autoconf configure), skip it.
        if os.path.exists(config_status_path):
            skip_config_status = False
    else:
        # config.status changed or was created, so we need to update the
        # list of config and command files.
        config_files, command_files = get_config_files(data)
        for f, t in config_files:
            if not os.path.exists(t) or \
                    os.path.getmtime(f) < os.path.getmtime(t):
                skip_config_status = False

    if not skip_config_status:
        if skip_configure:
            print prefix_lines('running config.status', relobjdir)
            sys.stdout.flush()
        try:
            output += subprocess.check_output(
                [data['shell'], '-c', './config.status'],
                stderr=subprocess.STDOUT,
                cwd=objdir,
                env=data['env'])
        except subprocess.CalledProcessError as e:
            ret = e.returncode
            output += e.output

        # Bump mtimes so downstream dependency checks see fresh outputs.
        for f in contents:
            f.update_time()

    return relobjdir, ret, output
Exemplo n.º 43
0
    def import_pr(self,
                  pull_request,
                  bug_number=None,
                  bugzilla_token=None,
                  reviewer=None):
        """Import a GitHub pull request into the local vendored tree.

        Downloads the PR's patch, splits it into individual commits, applies
        each one with ``patch -p1`` under the matching vendored project's
        path, and commits the results locally.

        Calls sys.exit() on an unrecognized PR URL, a dirty working tree, or
        a failed patch application.

        :param pull_request: URL of the GitHub pull request.
        :param bug_number: Bug number to reference in commit messages. If
            None and *bugzilla_token* is given, a bug is filed automatically.
        :param bugzilla_token: Bugzilla API token used to file a bug when no
            bug number was provided.
        :param reviewer: Reviewer name passed through to the commit messages.
        """
        import requests
        pr_number = None
        repository = None
        # Match the PR URL against the known vendored repositories.
        for r in PR_REPOSITORIES.values():
            if pull_request.startswith(GITHUB_ROOT + r['github'] + '/pull/'):
                # sanitize URL, dropping anything after the PR number
                pr_number = int(
                    re.search('/pull/([0-9]+)', pull_request).group(1))
                pull_request = GITHUB_ROOT + r['github'] + '/pull/' + str(
                    pr_number)
                repository = r
                break

        if repository is None:
            self.log(
                logging.ERROR, 'unrecognized_repo', {},
                'The pull request URL was not recognized; add it to the list of '
                'recognized repos in PR_REPOSITORIES in %s' % __file__)
            sys.exit(1)

        self.log(logging.INFO, 'import_pr', {'pr_url': pull_request},
                 'Attempting to import {pr_url}')
        # Refuse to apply patches on top of uncommitted local changes.
        dirty = [
            f for f in self.repository.get_changed_files(mode='all')
            if f.startswith(repository['path'])
        ]
        if dirty:
            self.log(logging.ERROR, 'dirty_tree', repository,
                     'Local {path} tree is dirty; aborting!')
            sys.exit(1)
        target_dir = mozpath.join(self.topsrcdir,
                                  os.path.normpath(repository['path']))

        if bug_number is None:
            if bugzilla_token is None:
                self.log(
                    logging.WARNING, 'no_token', {},
                    'No bug number or bugzilla API token provided; bug number will not '
                    'be added to commit messages.')
            else:
                bug_number = self._file_bug(bugzilla_token, repository,
                                            pr_number)
        elif bugzilla_token is not None:
            self.log(
                logging.WARNING, 'too_much_bug', {},
                'Providing a bugzilla token is unnecessary when a bug number is provided. '
                'Using bug number; ignoring token.')

        # GitHub serves the PR's full patch series at <pr-url>.patch.
        pr_patch = requests.get(pull_request + '.patch')
        pr_patch.raise_for_status()
        for patch in self._split_patches(pr_patch.content, bug_number,
                                         pull_request, reviewer):
            self.log(
                logging.INFO, 'commit_msg', patch,
                'Processing commit [{commit_summary}] by [{author}] at [{date}]'
            )
            patch_cmd = subprocess.Popen(['patch', '-p1', '-s'],
                                         stdin=subprocess.PIPE,
                                         cwd=target_dir)
            patch_cmd.stdin.write(patch['diff'])
            patch_cmd.stdin.close()
            patch_cmd.wait()
            if patch_cmd.returncode != 0:
                self.log(
                    logging.ERROR, 'commit_fail', {},
                    'Error applying diff from commit via "patch -p1 -s". Aborting...'
                )
                sys.exit(patch_cmd.returncode)
            self.repository.commit(patch['commit_msg'], patch['author'],
                                   patch['date'], [target_dir])
            self.log(logging.INFO, 'commit_pass', {},
                     'Committed successfully.')
Exemplo n.º 44
0
def test_migration(cmd, obj_dir, to_test, references):
    '''Test the given recipe.

    This creates a workdir by l10n-merging gecko-strings and the m-c source,
    to mimmic gecko-strings after the patch to test landed.
    It then runs the recipe with a gecko-strings clone as localization, both
    dry and wet.
    It inspects the generated commits, and shows a diff between the merged
    reference and the generated content.
    The diff is intended to be visually inspected. Some changes might be
    expected, in particular when formatting of the en-US strings is different.

    Returns 0 when all checks pass, 1 otherwise.
    '''
    rv = 0
    migration_name = os.path.splitext(os.path.split(to_test)[1])[0]
    work_dir = mozpath.join(obj_dir, migration_name)
    # Always start from a clean work dir so stale state can't leak in.
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(mozpath.join(work_dir, 'reference'))
    l10n_toml = mozpath.join(cmd.topsrcdir, 'browser', 'locales', 'l10n.toml')
    pc = TOMLParser().parse(l10n_toml, env={'l10n_base': work_dir})
    pc.set_locales(['reference'])
    files = ProjectFiles('reference', [pc])
    for ref in references:
        if ref != mozpath.normpath(ref):
            cmd.log(
                logging.ERROR, 'fluent-migration-test', {
                    'file': to_test,
                    'ref': ref,
                }, 'Reference path "{ref}" needs to be normalized for {file}')
            rv = 1
            continue
        full_ref = mozpath.join(work_dir, 'reference', ref)
        m = files.match(full_ref)
        if m is None:
            raise ValueError("Bad reference path: " + ref)
        m_c_path = m[1]
        g_s_path = mozpath.join(work_dir, 'gecko-strings', ref)
        # Read both channels' copies; use context managers so the handles are
        # closed deterministically (the original leaked them to the GC).
        resources = []
        for resource_path in (g_s_path, m_c_path):
            if os.path.exists(resource_path):
                with open(resource_path, 'rb') as resource_file:
                    resources.append(resource_file.read())
            else:
                resources.append(b'')
        ref_dir = os.path.dirname(full_ref)
        if not os.path.exists(ref_dir):
            os.makedirs(ref_dir)
        with open(full_ref, 'wb') as ref_file:
            ref_file.write(merge_channels(ref, resources))
    # NOTE(review): the hg client below is never closed; confirm whether a
    # client.close() should be added before returning.
    client = hglib.clone(source=mozpath.join(get_state_dir(), 'gecko-strings'),
                         dest=mozpath.join(work_dir, 'en-US'))
    client.open()
    old_tip = client.tip().node
    run_migration = [
        cmd._virtualenv_manager.python_path, '-m', 'fluent.migrate.tool',
        '--lang', 'en-US', '--reference-dir',
        mozpath.join(work_dir, 'reference'), '--localization-dir',
        mozpath.join(work_dir, 'en-US'), '--dry-run',
        'fluent_migrations.' + migration_name
    ]
    cmd.run_process(
        run_migration,
        cwd=work_dir,
        line_handler=print,
    )
    # drop --dry-run
    run_migration.pop(-2)
    cmd.run_process(
        run_migration,
        cwd=work_dir,
        line_handler=print,
    )
    tip = client.tip().node
    if old_tip == tip:
        cmd.log(logging.WARN, 'fluent-migration-test', {
            'file': to_test,
        }, 'No migration applied for {file}')
        return rv
    for ref in references:
        diff_resources(
            mozpath.join(work_dir, 'reference', ref),
            mozpath.join(work_dir, 'en-US', ref),
        )
    messages = [l.desc for l in client.log('::{} - ::{}'.format(tip, old_tip))]
    bug = re.search('[0-9]{5,}', migration_name).group()
    # Just check first message for bug number, they're all following the same pattern
    if bug not in messages[0]:
        rv = 1
        cmd.log(logging.ERROR, 'fluent-migration-test', {
            'file': to_test,
        }, 'Missing or wrong bug number for {file}')
    if any('part {}'.format(n + 1) not in msg
           for n, msg in enumerate(messages)):
        rv = 1
        cmd.log(logging.ERROR, 'fluent-migration-test', {
            'file': to_test,
        }, 'Commit messages should have "part {{index}}" for {file}')
    return rv
Exemplo n.º 45
0
 def file_path(self, name, *args):
     """Join *name* plus any extra components onto the test data_path."""
     components = (name,) + args
     return mozpath.join(data_path, *components)
Exemplo n.º 46
0
    UnifiedSources,
    VariablePassthru,
)
from mozbuild.frontend.emitter import TreeMetadataEmitter
from mozbuild.frontend.reader import (
    BuildReader,
    BuildReaderError,
    SandboxValidationError,
)

from mozbuild.test.common import MockConfig

import mozpack.path as mozpath

data_path = mozpath.abspath(mozpath.dirname(__file__))
data_path = mozpath.join(data_path, 'data')


class TestEmitterBasic(unittest.TestCase):
    def setUp(self):
        """Snapshot os.environ and drop MOZ_OBJDIR so each test starts clean."""
        self._old_env = dict(os.environ)
        if 'MOZ_OBJDIR' in os.environ:
            del os.environ['MOZ_OBJDIR']

    def tearDown(self):
        """Restore os.environ to the snapshot captured in setUp()."""
        os.environ.clear()
        for key, value in self._old_env.items():
            os.environ[key] = value

    def reader(self, name):
        config = MockConfig(mozpath.join(data_path, name),
                            extra_substs=dict(
                                ENABLE_TESTS='1',
def get_data_file(data_dir):
    """Return the first 'icudt*.dat' file found in *data_dir*, or None."""
    matches = glob.glob(mozpath.join(data_dir, 'icudt*.dat'))
    if not matches:
        return None
    return matches[0]
Exemplo n.º 48
0
#!/usr/bin/env python
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import sys
import os

import buildconfig
import mozpack.path as mozpath

# The xpidl parser is not incorporated in the in-tree virtualenv.
xpidl_dir = mozpath.join(buildconfig.topsrcdir, 'xpcom', 'idl-parser',
                         'xpidl')
sys.path.append(xpidl_dir)
import xpidl

# Instantiate the xpidl parser once at import time; helpers below take a
# parser as an argument.
p = xpidl.IDLParser()

def findIDL(includePath, interfaceFileName):
    """Return the path of *interfaceFileName* within *includePath*.

    Searches the include directories in order and returns the first path
    that exists on disk.

    Raises:
        Exception: if the file is not found in any include directory.
    """
    for d in includePath:
        path = mozpath.join(d, interfaceFileName)
        if os.path.exists(path):
            return path
    # Raise Exception, not BaseException: BaseException is reserved for
    # interpreter-level exits/interrupts and escapes `except Exception`
    # handlers, so raising it directly hides the error from normal handling.
    raise Exception("No IDL file found for interface %s "
                    "in include path %r"
                    % (interfaceFileName, includePath))

def loadEventIDL(parser, includePath, eventname):
Exemplo n.º 49
0
 def _tree_paths(self, topdir, filename):
     """Yield the path, relative to *topdir*, of every file named *filename*."""
     for root, _dirs, files in os.walk(topdir):
         for entry in files:
             if entry != filename:
                 continue
             yield mozpath.relpath(mozpath.join(root, entry), topdir)
Exemplo n.º 50
0
    def config(self, name, **kwargs):
        """Build a MockConfig rooted at the named directory under data_path."""
        config_root = mozpath.join(data_path, name)
        return MockConfig(config_root, **kwargs)
Exemplo n.º 51
0
def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
                       non_unified_sources, action_overrides):
    """Translate loaded gyp data into mozbuild GypContext objects.

    Yields one GypContext per target reachable from the root gyp file
    *path*, populated from the target's gyp spec: sources (unified where
    allowed), defines, include directories, linkage (USE_LIBS/OS_LIBS)
    and per-suffix compiler flags.

    Arguments:
     gyp_result - (flat_list, targets, data) tuple as produced by gyp.
     gyp_dir_attrs - moz.build GYP_DIRS attributes (no_chromium,
                     no_unified, sandbox_vars).
     path - Path to the root gyp file, forward-slash separated.
     config - Build configuration (substs, topsrcdir, topobjdir, ...).
     output - Base objdir under which per-target objdirs are created.
     non_unified_sources - Sources that must not be built unified.
     action_overrides - Mapping of gyp action names to moz.build scripts.

    Raises RuntimeError when a target lacks the selected Debug/Release
    configuration and NotImplementedError for unsupported target types.
    """
    flat_list, targets, data = gyp_result
    no_chromium = gyp_dir_attrs.no_chromium
    no_unified = gyp_dir_attrs.no_unified

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(
            config,
            mozpath.relpath(mozpath.join(output, reldir, subdir),
                            config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(
                mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs.get('MOZ_DEBUG') else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if 'actions' in spec:
            handle_actions(spec['actions'], context, action_overrides)
        if 'copies' in spec:
            handle_copies(spec['copies'], context)

        use_libs = []
        libs = []

        def add_deps(s):
            # Accumulate in-tree library names (USE_LIBS) from the
            # target's direct and original dependencies, recursing to
            # pick up transitive dependencies.
            for t in s.get('dependencies', []) + s.get('dependencies_original',
                                                       []):
                ty = targets[t]['type']
                if ty in ('static_library', 'shared_library'):
                    use_libs.append(targets[t]['target_name'])
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ('static_library', 'none'):
                    add_deps(targets[t])
            # NOTE(review): this reads the enclosing 'spec', not the local
            # 's', so the root target's libraries are re-appended once per
            # recursive call -- presumably the duplicates are harmless
            # downstream; confirm before changing.
            libs.extend(spec.get('libraries', []))

        # XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        # Split raw library entries into linker flags / OS libraries vs.
        # in-tree libraries referenced by basename.
        os_libs = []
        for l in libs:
            if l.startswith('-'):
                os_libs.append(l)
            elif l.endswith('.lib'):
                os_libs.append(l[:-4])
            elif l:
                # For library names passed in from moz.build.
                use_libs.append(os.path.basename(l))

        if spec['type'] == 'none':
            if not ('actions' in spec or 'copies' in spec):
                continue
        elif spec['type'] in ('static_library', 'shared_library',
                              'executable'):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if spec['type'] in ('static_library', 'shared_library'):
                if name.startswith('lib'):
                    name = name[3:]
                # The context expects a unicode string.
                context['LIBRARY_NAME'] = name.decode('utf-8')
            else:
                context['PROGRAM'] = name.decode('utf-8')
            if spec['type'] == 'shared_library':
                context['FORCE_SHARED_LIB'] = True
            elif spec['type'] == 'static_library' and \
                    spec.get('variables', {}).get('no_expand_libs', '0') == '1':
                # PSM links a NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context['NO_EXPAND_LIBS'] = True
            if use_libs:
                context['USE_LIBS'] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context['OS_LIBS'] = os_libs
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith('$INTERMEDIATE_DIR/'):
                    s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/',
                                                      '!'))
                else:
                    s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext == '.def':
                    context['SYMBOLS_FILE'] = s
                elif ext != '.S' and not no_unified and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == '.s':
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            defines = target_conf.get('defines', [])
            if config.substs['CC_TYPE'] == 'clang-cl' and no_chromium:
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if '=' in define:
                    name, value = define.split('=', 1)
                    # The NSS gyp file doesn't expose a way to override this
                    # currently, so we do so here.
                    if name == 'NSS_ALLOW_SSLKEYLOGFILE' and \
                            config.substs.get('RELEASE_OR_BETA', False):
                        continue
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            product_dir_dist = '$PRODUCT_DIR/dist/'
            for include in target_conf.get('include_dirs', []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = '!/dist/include/' + include[len(product_dir_dist
                                                              ):]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = '!/' + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    #
                    # We do allow !- and %-prefixed paths, assuming they come
                    # from moz.build and will be handled the same way as if they
                    # were given to LOCAL_INCLUDES in moz.build.
                    if include.startswith('/'):
                        resolved = mozpath.abspath(
                            mozpath.join(config.topsrcdir, include[1:]))
                    elif not include.startswith(('!', '%')):
                        resolved = mozpath.abspath(
                            mozpath.join(mozpath.dirname(build_file), include))
                    if not include.startswith(
                        ('!', '%')) and not os.path.exists(resolved):
                        continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            if use_defines_in_asflags and defines:
                context['ASFLAGS'] += ['-D' + d for d in defines]
            if config.substs['OS_TARGET'] == 'SunOS':
                context['LDFLAGS'] = target_conf.get('ldflags', [])
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                # Route the mozilla cflags to the right per-language flag
                # variable, chosen by the source extensions seen above.
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (suffix_map[e] for e in extensions
                             if e in suffix_map)
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context['LOCAL_INCLUDES'] += [
                '!/ipc/ipdl/_ipdlheaders',
                '/ipc/chromium/src',
                '/ipc/glue',
            ]
            # These get set via VC project file settings for normal GYP builds.
            if config.substs['OS_TARGET'] == 'WINNT':
                context['DEFINES']['UNICODE'] = True
                context['DEFINES']['_UNICODE'] = True
        context['COMPILE_FLAGS']['OS_INCLUDES'] = []

        for key, value in gyp_dir_attrs.sandbox_vars.items():
            if context.get(key) and isinstance(context[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context[key] = value + context[key]
            elif context.get(key) and isinstance(context[key], dict):
                context[key].update(value)
            else:
                context[key] = value

        yield context
Exemplo n.º 52
0
    def run_addon_sdk_moz_build(self, **params):
        """Regenerate addon-sdk/moz.build from mozbuild.template.

        Walks addon-sdk/source/lib collecting .js/.jsm/.html files,
        renders them as EXTRA_JS_MODULES entries (modules excluded on
        b2g/gonk are wrapped in a MOZ_WIDGET_TOOLKIT guard) and writes
        the combined result to addon-sdk/moz.build next to the template.
        *params* is accepted for command-dispatch compatibility and is
        unused.
        """
        addon_sdk_dir = mozpath.join(self.topsrcdir, 'addon-sdk')
        js_src_dir = mozpath.join(addon_sdk_dir, 'source/lib')
        dirs_to_files = {}

        # Map each directory (relative to source/lib) to the JS-ish files
        # it contains; directories with no such files are skipped.
        for path, dirs, files in os.walk(js_src_dir):
            js_files = [f for f in files if f.endswith(('.js', '.jsm', '.html'))]
            if not js_files:
                continue

            relative = mozpath.relpath(path, js_src_dir)
            dirs_to_files[relative] = js_files

        moz_build = """# AUTOMATICALLY GENERATED FROM mozbuild.template AND mach.  DO NOT EDIT.
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

%(moz-build-template)s
if CONFIG['MOZ_WIDGET_TOOLKIT'] != "gonk":
%(non-b2g-modules)s
%(always-on-modules)s"""

        # Module directories that must not ship on b2g (gonk).
        non_b2g_paths = [
            'method/test',
            'sdk/ui',
            'sdk/ui/button',
            'sdk/ui/sidebar',
            'sdk/places',
            'sdk/places/host',
            'sdk/tabs',
            'sdk/panel',
            'sdk/frame',
            'sdk/test',
            'sdk/window',
            'sdk/windows',
            'sdk/deprecated',
        ]

        non_b2g_modules = []
        always_on_modules = []

        for d, files in sorted(dirs_to_files.items()):
            if d in non_b2g_paths:
                non_b2g_modules.append((d, files))
            else:
                always_on_modules.append((d, files))

        def list_to_js_modules(l, indent=''):
            # Render (directory, files) pairs as a block of
            # EXTRA_JS_MODULES assignments, each line prefixed by indent.
            js_modules = []
            for d, files in l:
                if d == '':
                    module_path = ''
                    dir_path = ''
                else:
                    # Ensure that we don't have things like:
                    #   EXTRA_JS_MODULES.commonjs.sdk.private-browsing
                    # which would be a Python syntax error.
                    path = d.split('/')
                    module_path = ''.join('.' + p if p.find('-') == -1 else "['%s']" % p for p in path)
                    dir_path = d + '/'
                filelist = ["'source/lib/%s%s'" % (dir_path, f)
                            for f in sorted(files, key=lambda x: x.lower())]
                js_modules.append("EXTRA_JS_MODULES.commonjs%s += [\n    %s,\n]\n"
                                  % (module_path, ',\n    '.join(filelist)))
            stringified = '\n'.join(js_modules)
            # This isn't the same thing as |js_modules|, since |js_modules| had
            # embedded newlines.
            lines = stringified.split('\n')
            # Indent lines while avoiding trailing whitespace.
            lines = [indent + line if line else line for line in lines]
            return '\n'.join(lines)

        moz_build_output = mozpath.join(addon_sdk_dir, 'moz.build')
        moz_build_template = mozpath.join(addon_sdk_dir, 'mozbuild.template')
        with open(moz_build_output, 'w') as f, open(moz_build_template, 'r') as t:
            substs = { 'moz-build-template': t.read(),
                       'non-b2g-modules': list_to_js_modules(non_b2g_modules,
                                                             indent='    '),
                       'always-on-modules': list_to_js_modules(always_on_modules) }
            f.write(moz_build % substs)
Exemplo n.º 53
0
 def __init__(self, cache_dir, cache_name, cache_size, cache_callback=None, log=None, skip_cache=False):
     # Make sure the cache directory exists (and is excluded from OS
     # indexing) before anything writes to it.
     mkdir(cache_dir, not_indexed=True)
     self._skip_cache = skip_cache
     self._log = log
     # Bounded LRU cache; cache_callback fires on eviction.
     self._cache = pylru.lrucache(cache_size, callback=cache_callback)
     self._cache_filename = mozpath.join(cache_dir, cache_name + '-cache.pickle')
Exemplo n.º 54
0
 def test_join(self):
     # Exercise join() with plain components, empty components and an
     # absolute final segment.
     cases = [
         (("foo", "bar", "baz"), "foo/bar/baz"),
         (("foo", "", "bar"), "foo/bar"),
         (("", "foo", "bar"), "foo/bar"),
         (("", "foo", "/bar"), "/bar"),
     ]
     for args, expected in cases:
         self.assertEqual(join(*args), expected)
Exemplo n.º 55
0
    def convert_support_files(self, test, install_root, manifest_dir, out_dir):
        """Turn the install-related fields of a test entry into install data.

        Arguments:
         test - The test object to process.
         install_root - The directory under $objdir/_tests that will contain
                        the tests for this harness (examples are
                        "testing/mochitest", "xpcshell").
         manifest_dir - Absolute path to the (srcdir) directory containing
                        the manifest that included this test.
         out_dir - The path relative to $objdir/_tests used as the
                   destination for the test, based on the relative path to
                   the manifest in the srcdir and the install_root.

        Returns a TestInstallInfo describing plain installs, glob-pattern
        installs, externally generated installs and deferred
        cross-directory installs.

        Raises ValueError when a single test lists the same entry twice
        under one field.
        """
        info = TestInstallInfo()
        for field, seen in self._fields:
            value = test.get(field, '')
            for pattern in value.split():

                # We track uniqueness locally (per test) where duplicates are forbidden,
                # and globally, where they are permitted. If a support file appears multiple
                # times for a single test, there are unnecessary entries in the manifest. But
                # many entries will be shared across tests that share defaults.
                key = field, pattern, out_dir
                if key in info.seen:
                    raise ValueError(
                        "%s appears multiple times in a test manifest under a %s field,"
                        " please omit the duplicate entry." % (pattern, field))
                info.seen.add(key)
                if key in seen:
                    continue
                seen.add(key)

                if field == 'generated-files':
                    info.external_installs.add(
                        mozpath.normpath(mozpath.join(out_dir, pattern)))
                # '!' indicates our syntax for inter-directory support file
                # dependencies. These receive special handling in the backend.
                elif pattern[0] == '!':
                    info.deferred_installs.add(pattern)
                # We only support globbing on support-files because
                # the harness doesn't support * for head.
                elif '*' in pattern and field == 'support-files':
                    info.pattern_installs.append(
                        (manifest_dir, pattern, out_dir))
                # "absolute" paths identify files that are to be
                # placed in the install_root directory (no globs)
                elif pattern[0] == '/':
                    full = mozpath.normpath(
                        mozpath.join(manifest_dir, mozpath.basename(pattern)))
                    info.installs.append(
                        (full, mozpath.join(install_root, pattern[1:])))
                else:
                    full = mozpath.normpath(mozpath.join(
                        manifest_dir, pattern))
                    dest_path = mozpath.join(out_dir, pattern)

                    # If the path resolves to a different directory
                    # tree, we take special behavior depending on the
                    # entry type.
                    if not full.startswith(manifest_dir):
                        # If it's a support file, we install the file
                        # into the current destination directory.
                        # This implementation makes installing things
                        # with custom prefixes impossible. If this is
                        # needed, we can add support for that via a
                        # special syntax later.
                        if field == 'support-files':
                            dest_path = mozpath.join(out_dir,
                                                     os.path.basename(pattern))
                        # If it's not a support file, we ignore it.
                        # This preserves old behavior so things like
                        # head files doesn't get installed multiple
                        # times.
                        else:
                            continue
                    info.installs.append((full, mozpath.normpath(dest_path)))
        return info
Exemplo n.º 56
0
from mozbuild.util import (
    expand_variables, )

# Define this module as gyp.generator.mozbuild so that gyp can use it
# as a generator under the name "mozbuild".
sys.modules['gyp.generator.mozbuild'] = sys.modules[__name__]

# build/gyp_chromium does this:
#   script_dir = os.path.dirname(os.path.realpath(__file__))
#   chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
#   sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
# We're not importing gyp_chromium, but we want both script_dir and
# chrome_src for the default includes, so go backwards from the pylib
# directory, which is the parent directory of gyp module.
# Walk up from the gyp package directory (…/tools/gyp/pylib/gyp) to the
# chromium source root, then derive the build script directory from it.
chrome_src = mozpath.abspath(
    mozpath.join(mozpath.dirname(gyp.__file__), '../../../../..'))
script_dir = mozpath.join(chrome_src, 'build')


def encode(value):
    # gyp hands back unicode strings, but the build frontend wants UTF-8
    # byte strings (Python 2): encode unicode values, pass everything
    # else through untouched.
    if not isinstance(value, unicode):
        return value
    return value.encode('utf-8')


# Default variables gyp uses when evaluating gyp files.
generator_default_variables = {}
for dirname in [
        b'INTERMEDIATE_DIR', b'SHARED_INTERMEDIATE_DIR', b'PRODUCT_DIR',
        b'LIB_DIR', b'SHARED_LIB_DIR'
]:
Exemplo n.º 57
0
 def location(self):
     # The stored base directory joined with the relative path.
     base, rel = self.base, self.relpath
     return mozpath.join(base, rel)
Exemplo n.º 58
0
    def process_package_artifact(self, filename, processed_filename):
        """Repack a macOS package artifact (.dmg) into a jar of dist/bin files.

        Extracts *filename* into a temporary directory -- with the
        dmg/hfsplus tools when cross-building on Linux, mozinstall
        otherwise -- locates the single .app bundle inside, and writes the
        selected binaries into *processed_filename* as a jar: one set
        flattened directly into bin/, the other keeping its
        bundle-relative path under bin/.

        Raises ValueError when the extracted image does not contain
        exactly one .app bundle.
        """
        tempdir = tempfile.mkdtemp()
        oldcwd = os.getcwd()
        try:
            self.log(logging.INFO, 'artifact',
                {'tempdir': tempdir},
                'Unpacking DMG into {tempdir}')
            if self._substs['HOST_OS_ARCH'] == 'Linux':
                # This is a cross build, use hfsplus and dmg tools to extract the dmg.
                os.chdir(tempdir)
                with open(os.devnull, 'wb') as devnull:
                    subprocess.check_call([
                        self._substs['DMG_TOOL'],
                        'extract',
                        filename,
                        'extracted_img',
                    ], stdout=devnull)
                    subprocess.check_call([
                        self._substs['HFS_TOOL'],
                        'extracted_img',
                        'extractall'
                    ], stdout=devnull)
            else:
                mozinstall.install(filename, tempdir)

            bundle_dirs = glob.glob(mozpath.join(tempdir, '*.app'))
            if len(bundle_dirs) != 1:
                raise ValueError('Expected one source bundle, found: {}'.format(bundle_dirs))
            [source] = bundle_dirs

            # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
            paths_no_keep_path = ('Contents/MacOS', [
                'crashreporter.app/Contents/MacOS/crashreporter',
                'firefox',
                'firefox-bin',
                'libfreebl3.dylib',
                'liblgpllibs.dylib',
                # 'liblogalloc.dylib',
                'libmozglue.dylib',
                'libnss3.dylib',
                'libnssckbi.dylib',
                'libnssdbm3.dylib',
                'libplugin_child_interpose.dylib',
                # 'libreplace_jemalloc.dylib',
                # 'libreplace_malloc.dylib',
                'libmozavutil.dylib',
                'libmozavcodec.dylib',
                'libsoftokn3.dylib',
                'pingsender',
                'plugin-container.app/Contents/MacOS/plugin-container',
                'updater.app/Contents/MacOS/org.mozilla.updater',
                # 'xpcshell',
                'XUL',
            ])

            # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
            paths_keep_path = [
                ('Contents/MacOS', [
                    'crashreporter.app/Contents/MacOS/minidump-analyzer',
                ]),
                ('Contents/Resources', [
                    'browser/components/libbrowsercomps.dylib',
                    'dependentlibs.list',
                    # 'firefox',
                    'gmp-clearkey/0.1/libclearkey.dylib',
                    # 'gmp-fake/1.0/libfake.dylib',
                    # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
                    '**/interfaces.xpt',
                ]),
            ]

            with JarWriter(file=processed_filename, optimize=False, compress_level=5) as writer:
                root, paths = paths_no_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(logging.INFO, 'artifact',
                            {'path': p},
                            'Adding {path} to processed archive')
                        destpath = mozpath.join('bin', os.path.basename(p))
                        writer.add(destpath.encode('utf-8'), f, mode=f.mode)

                for root, paths in paths_keep_path:
                    finder = UnpackFinder(mozpath.join(source, root))
                    for path in paths:
                        for p, f in finder.find(path):
                            self.log(logging.INFO, 'artifact',
                                     {'path': p},
                                     'Adding {path} to processed archive')
                            destpath = mozpath.join('bin', p)
                            writer.add(destpath.encode('utf-8'), f.open(), mode=f.mode)

        finally:
            # Always restore the working directory and clean up the
            # extraction directory, even when repacking failed.
            os.chdir(oldcwd)
            try:
                shutil.rmtree(tempdir)
            except (OSError, IOError):
                self.log(logging.WARN, 'artifact',
                    {'tempdir': tempdir},
                    'Unable to delete {tempdir}')
                pass
Exemplo n.º 59
0
 def path(self):
     # Full path: base joined with relpath, normalized so '..' segments
     # and duplicate separators are collapsed.
     joined = mozpath.join(self.base, self.relpath)
     return mozpath.normpath(joined)
Exemplo n.º 60
0
 def _example_paths(self, interface):
     # Header/implementation pair for the example files generated for
     # the given interface under the codegen directory.
     header = mozpath.join(self._codegen_dir, '%s-example.h' % interface)
     impl = mozpath.join(self._codegen_dir, '%s-example.cpp' % interface)
     return (header, impl)