def _get_manager_args(self):
        tmp = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, tmp)

        cache_dir = mozpath.join(tmp, 'cache')
        os.mkdir(cache_dir)

        ip = self._static_input_paths

        inputs = (
            ip,
            {mozpath.splitext(mozpath.basename(p))[0] for p in ip},
            set(),
            set(),
        )

        return dict(
            config_path=self._config_path,
            inputs=inputs,
            exported_header_dir=mozpath.join(tmp, 'exports'),
            codegen_dir=mozpath.join(tmp, 'codegen'),
            state_path=mozpath.join(tmp, 'state.json'),
            make_deps_path=mozpath.join(tmp, 'codegen.pp'),
            make_deps_target='codegen.pp',
            cache_dir=cache_dir,
        )
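For illustration, the second element of the `inputs` tuple above is the set of exported stems derived from the static input paths. A minimal standalone sketch of that derivation, with illustrative paths and os.path standing in for mozpath (which agrees for these '/'-separated paths):

# Sketch only: paths are illustrative, os.path stands in for mozpath.
import os.path
static_input_paths = ['dom/webidl/Event.webidl', 'dom/webidl/Node.webidl']
stems = {os.path.splitext(os.path.basename(p))[0] for p in static_input_paths}
print(sorted(stems))  # ['Event', 'Node']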
Example #2
    def register_idl(self, idl, allow_existing=False):
        """Registers an IDL file with this instance.

        The IDL file will be built, installed, etc.
        """
        basename = mozpath.basename(idl.source_path)
        root = mozpath.splitext(basename)[0]
        xpt = "%s.xpt" % idl.module
        manifest = mozpath.join(idl.install_target, "components", "interfaces.manifest")
        chrome_manifest = mozpath.join(idl.install_target, "chrome.manifest")

        entry = {
            "source": idl.source_path,
            "module": idl.module,
            "basename": basename,
            "root": root,
            "manifest": manifest,
        }

        if not allow_existing and entry["basename"] in self.idls:
            raise Exception("IDL already registered: %s" % entry["basename"])

        self.idls[entry["basename"]] = entry
        t = self.modules.setdefault(entry["module"], (idl.install_target, set()))
        t[1].add(entry["root"])

        if idl.add_to_manifest:
            self.interface_manifests.setdefault(manifest, set()).add(xpt)
            self.chrome_manifests.add(chrome_manifest)
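The modules registry above pairs an install target with a set of IDL roots. A minimal standalone sketch of that setdefault pattern (module and root names are illustrative):

# Sketch only: names are illustrative. setdefault keeps an existing
# entry, so the install target of the first registration wins.
modules = {}
t = modules.setdefault('xpcom_base', ('dist/bin', set()))
t[1].add('nsIFoo')
t = modules.setdefault('xpcom_base', ('dist/other', set()))
t[1].add('nsIBar')
print(modules['xpcom_base'][0])          # dist/bin
print(sorted(modules['xpcom_base'][1]))  # ['nsIBar', 'nsIFoo']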
Example #3
    def _handle_manifest_entry(self, entry, jars):
        jarpath = None
        if isinstance(entry, ManifestEntryWithRelPath) and \
                urlparse(entry.relpath).scheme == 'jar':
            jarpath, entry = self._unjarize(entry, entry.relpath)
        elif isinstance(entry, ManifestResource) and \
                urlparse(entry.target).scheme == 'jar':
            jarpath, entry = self._unjarize(entry, entry.target)
        if jarpath:
            # Don't defer unpacking the jar file. If we already saw
            # it, take (and remove) it from the registry. If we
            # haven't, try to find it now.
            if self.files.contains(jarpath):
                jar = self.files[jarpath]
                self.files.remove(jarpath)
            else:
                jar = [f for p, f in FileFinder.find(self, jarpath)]
                assert len(jar) == 1
                jar = jar[0]
            if jarpath not in jars:
                base = mozpath.splitext(jarpath)[0]
                for j in self._open_jar(jarpath, jar):
                    self.files.add(mozpath.join(base, j.filename),
                                   DeflatedFile(j))
            jars.add(jarpath)
            self.kind = 'jar'
        return entry
Example #4
    def _write_global_derived(self):
        from Codegen import GlobalGenRoots

        things = [('declare', f) for f in self.GLOBAL_DECLARE_FILES]
        things.extend(('define', f) for f in self.GLOBAL_DEFINE_FILES)

        result = (set(), set(), set())

        for what, filename in things:
            stem = mozpath.splitext(filename)[0]
            root = getattr(GlobalGenRoots, stem)(self._config)

            if what == 'declare':
                code = root.declare()
                output_root = self._exported_header_dir
            elif what == 'define':
                code = root.define()
                output_root = self._codegen_dir
            else:
                raise Exception('Unknown global gen type: %s' % what)

            output_path = mozpath.join(output_root, filename)
            self._maybe_write_file(output_path, code, result)

        return result
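The getattr call above dispatches on a file stem to a generator of the same name. A standalone sketch of that pattern (the class and member here are hypothetical stand-ins for Codegen.GlobalGenRoots):

# Sketch only: Roots and PrototypeList are hypothetical stand-ins.
import os.path

class Roots(object):
    @staticmethod
    def PrototypeList(config):
        return 'code for PrototypeList'

stem = os.path.splitext('PrototypeList.h')[0]
print(getattr(Roots, stem)(None))  # code for PrototypeList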
Example #5
    def _binding_info(self, p):
        """Compute binding metadata for an input path.

        Returns a tuple of:

          (stem, binding_stem, is_event, header_dir, output_files)

        output_files is itself a tuple. The first two items are the binding
        header and C++ paths, respectively. The second pair are the event
        header and C++ paths, or None if this isn't an event binding.
        """
        basename = mozpath.basename(p)
        stem = mozpath.splitext(basename)[0]
        binding_stem = '%sBinding' % stem

        if stem in self._exported_stems:
            header_dir = self._exported_header_dir
        else:
            header_dir = self._codegen_dir

        is_event = stem in self._generated_events_stems

        files = (
            mozpath.join(header_dir, '%s.h' % binding_stem),
            mozpath.join(self._codegen_dir, '%s.cpp' % binding_stem),
            mozpath.join(header_dir, '%s.h' % stem) if is_event else None,
            mozpath.join(self._codegen_dir, '%s.cpp' % stem) if is_event else None,
        )

        return stem, binding_stem, is_event, header_dir, files
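A standalone sketch of the stem computation above for one illustrative input (os.path stands in for mozpath):

# Sketch only: the input path is illustrative.
import os.path
p = 'dom/webidl/Node.webidl'
stem = os.path.splitext(os.path.basename(p))[0]
binding_stem = '%sBinding' % stem
print('%s %s' % (stem, binding_stem))  # Node NodeBinding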
Example #6
    def register_idl(self, idl, allow_existing=False):
        """Registers an IDL file with this instance.

        The IDL file will be built, installed, etc.
        """
        basename = mozpath.basename(idl.source_path)
        root = mozpath.splitext(basename)[0]
        xpt = '%s.xpt' % idl.module
        manifest = mozpath.join(idl.install_target, 'components', 'interfaces.manifest')
        chrome_manifest = mozpath.join(idl.install_target, 'chrome.manifest')

        entry = {
            'source': idl.source_path,
            'module': idl.module,
            'basename': basename,
            'root': root,
            'manifest': manifest,
        }

        if not allow_existing and entry['basename'] in self.idls:
            raise Exception('IDL already registered: %s' % entry['basename'])

        self.idls[entry['basename']] = entry
        t = self.modules.setdefault(entry['module'], (idl.install_target, set()))
        t[1].add(entry['root'])

        if idl.add_to_manifest:
            self.interface_manifests.setdefault(manifest, set()).add(xpt)
            self.chrome_manifests.add(chrome_manifest)
Example #7
        def files_from(ipdl):
            base = mozpath.basename(ipdl)
            root, ext = mozpath.splitext(base)

            # Both .ipdl and .ipdlh become .cpp files
            files = ["%s.cpp" % root]
            if ext == ".ipdl":
                # .ipdl also becomes Child/Parent.cpp files
                files.extend(["%sChild.cpp" % root, "%sParent.cpp" % root])
            return files
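A standalone walk-through of the mapping above for an illustrative .ipdl input (os.path stands in for mozpath):

# Sketch only: the input path is illustrative. A .ipdlh input would
# yield just ['PContent.cpp'].
import os.path
root, ext = os.path.splitext(os.path.basename('ipc/PContent.ipdl'))
files = ['%s.cpp' % root]
if ext == '.ipdl':
    files.extend(['%sChild.cpp' % root, '%sParent.cpp' % root])
print(files)  # ['PContent.cpp', 'PContentChild.cpp', 'PContentParent.cpp']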
Example #8
    def _unjarize(self, entry, relpath):
        '''
        Transform a manifest entry pointing to chrome data in a jar into one
        pointing to the corresponding unpacked path. Return the jar path and
        the new entry.
        '''
        base = entry.base
        jar, relpath = urlparse(relpath).path.split('!', 1)
        entry = entry.rebase(mozpath.join(base, 'jar:%s!' % jar)) \
            .move(mozpath.join(base, mozpath.splitext(jar)[0])) \
            .rebase(base)
        return mozpath.join(base, jar), entry
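The core step above splits a jar: URL into the jar file and the path inside it. A standalone sketch with an illustrative value, using the Python 2 urlparse module as the code above does:

# Sketch only: the relpath value is illustrative (Python 2 urlparse).
from urlparse import urlparse
relpath = 'jar:chrome/toolkit.jar!/content/browser.xul'
jar, inner = urlparse(relpath).path.split('!', 1)
print(jar)    # chrome/toolkit.jar
print(inner)  # /content/browser.xul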
Example #9
    def _preprocess(self, backend_file, input_file, destdir=None):
        cmd = self._py_action('preprocessor')
        cmd.extend(backend_file.defines)
        cmd.extend(['$(ACDEFINES)', '%f', '-o', '%o'])

        base_input = mozpath.basename(input_file)
        if base_input.endswith('.in'):
            base_input = mozpath.splitext(base_input)[0]
        output = mozpath.join(destdir, base_input) if destdir else base_input

        backend_file.rule(
            inputs=[input_file],
            display='Preprocess %o',
            cmd=cmd,
            outputs=[output],
        )
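A standalone sketch of the output-name computation above, which strips a trailing .in (input path illustrative, os.path standing in for mozpath):

# Sketch only: the input path is illustrative.
import os.path
input_file = 'browser/app/profile.js.in'
base_input = os.path.basename(input_file)
if base_input.endswith('.in'):
    base_input = os.path.splitext(base_input)[0]
print(base_input)  # profile.js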
Example #10
File: common.py Project: kanru/gecko-dev
    def register_idl(self, source, module, allow_existing=False):
        """Registers an IDL file with this instance.

        The IDL file will be built, installed, etc.
        """
        basename = mozpath.basename(source)
        root = mozpath.splitext(basename)[0]

        entry = {
            'source': source,
            'module': module,
            'basename': basename,
            'root': root,
        }

        if not allow_existing and entry['basename'] in self.idls:
            raise Exception('IDL already registered: %s' % entry['basename'])

        self.idls[entry['basename']] = entry
        self.modules.setdefault(entry['module'], set()).add(entry['root'])
Example #11
File: common.py Project: JasonGross/mozjs
    def register_idl(self, source, module, allow_existing=False):
        """Registers an IDL file with this instance.

        The IDL file will be built, installed, etc.
        """
        basename = mozpath.basename(source)
        root = mozpath.splitext(basename)[0]

        entry = {
            'source': source,
            'module': module,
            'basename': basename,
            'root': root,
        }

        if not allow_existing and entry['basename'] in self.idls:
            raise Exception('IDL already registered: %s' % entry['basename'])

        self.idls[entry['basename']] = entry
        self.modules.setdefault(entry['module'], set()).add(entry['root'])
Example #12
File: tup.py Project: luke-chang/gecko-1
    def _preprocess(self, backend_file, input_file, destdir=None):
        # .css files use '%' as the preprocessor marker, which must be escaped
        # as '%%' in the Tupfile.
        marker = '%%' if input_file.endswith('.css') else '#'

        cmd = self._py_action('preprocessor')
        cmd.extend([shell_quote(d) for d in backend_file.defines])
        cmd.extend(['$(ACDEFINES)', '%f', '-o', '%o', '--marker=%s' % marker])

        base_input = mozpath.basename(input_file)
        if base_input.endswith('.in'):
            base_input = mozpath.splitext(base_input)[0]
        output = mozpath.join(destdir, base_input) if destdir else base_input

        backend_file.rule(
            inputs=[input_file],
            display='Preprocess %o',
            cmd=cmd,
            outputs=[output],
        )
Example #13
    def register_idl(self, idl, allow_existing=False):
        """Registers an IDL file with this instance.

        The IDL file will be built, installed, etc.
        """
        basename = mozpath.basename(idl.source_path)
        root = mozpath.splitext(basename)[0]
        xpt = '%s.xpt' % idl.module

        entry = {
            'source': idl.source_path,
            'module': idl.module,
            'basename': basename,
            'root': root,
        }

        if not allow_existing and entry['basename'] in self.idls:
            raise Exception('IDL already registered: %s' % entry['basename'])

        self.idls[entry['basename']] = entry
        t = self.modules.setdefault(entry['module'], (idl.install_target, set()))
        t[1].add(entry['root'])
Example #14
    def _write_global_derived(self):
        things = [('declare', f) for f in self.GLOBAL_DECLARE_FILES]
        things.extend(('define', f) for f in self.GLOBAL_DEFINE_FILES)

        result = (set(), set(), set())

        for what, filename in things:
            stem = mozpath.splitext(filename)[0]
            root = getattr(GlobalGenRoots, stem)(self._config)

            if what == 'declare':
                code = root.declare()
                output_root = self._exported_header_dir
            elif what == 'define':
                code = root.define()
                output_root = self._codegen_dir
            else:
                raise Exception('Unknown global gen type: %s' % what)

            output_path = mozpath.join(output_root, filename)
            self._maybe_write_file(output_path, code, result)

        return result
Example #15
File: tup.py Project: rwaldron/gecko-dev
    def _preprocess(self, backend_file, input_file, destdir=None, target=None):
        if target is None:
            target = mozpath.basename(input_file)
        # .css files use '%' as the preprocessor marker, which must be escaped
        # as '%%' in the Tupfile.
        marker = '%%' if target.endswith('.css') else '#'

        cmd = self._py_action('preprocessor')
        cmd.extend([shell_quote(d) for d in backend_file.defines])
        cmd.extend(['$(ACDEFINES)', '%f', '-o', '%o', '--marker=%s' % marker])

        base_input = mozpath.basename(target)
        if base_input.endswith('.in'):
            base_input = mozpath.splitext(base_input)[0]
        output = mozpath.join(destdir, base_input) if destdir else base_input

        backend_file.rule(
            inputs=[input_file],
            extra_inputs=[self._early_generated_files],
            display='Preprocess %o',
            cmd=cmd,
            outputs=[output],
        )
Example #16
def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
    """Read a gyp configuration and emit GypSandboxes for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp configuration
    file, output is the base path under which the objdir for the various gyp
    dependencies will be, and vars is a dict of variables to pass to the gyp
    processor.
    """

    time_start = time.time()
    all_sources = set()

    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())

    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
    }

    # Files that gyp_chromium always includes
    includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
    finder = FileFinder(chrome_src, find_executables=False)
    includes.extend(encode(mozpath.join(chrome_src, name))
        for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
            default_variables=str_vars,
            includes=includes,
            depth=encode(mozpath.dirname(path)),
            params=params)

    # Process all targets from the given gyp file and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets, path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)
        # The list of included files returned by gyp is relative to build_file
        included_files = [mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f))
                          for f in data[build_file]['included_files']]
        # Emit a sandbox for each target.
        sandbox = GypSandbox(mozpath.abspath(build_file), included_files)
        sandbox.config = config

        with sandbox.allow_all_writes() as d:
            topsrcdir = d['TOPSRCDIR'] = config.topsrcdir
            d['TOPOBJDIR'] = config.topobjdir
            relsrcdir = d['RELATIVEDIR'] = mozpath.relpath(mozpath.dirname(build_file), config.topsrcdir)
            d['SRCDIR'] = mozpath.join(topsrcdir, relsrcdir)

            # Each target is given its own objdir. The base of that objdir
            # is derived from the relative path from the root gyp file path
            # to the current build_file, placed under the given output
            # directory. Since several targets can be in a given build_file,
            # separate them into subdirectories using the build_file basename
            # and the target_name.
            reldir = mozpath.relpath(mozpath.dirname(build_file),
                                     mozpath.dirname(path))
            subdir = '%s_%s' % (
                mozpath.splitext(mozpath.basename(build_file))[0],
                target_name,
            )
            d['OBJDIR'] = mozpath.join(output, reldir, subdir)
            d['IS_GYP_DIR'] = True

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if spec['type'] == 'none':
            continue
        elif spec['type'] == 'static_library':
            sandbox['FORCE_STATIC_LIB'] = True
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if name.startswith('lib'):
                name = name[3:]
            # The sandbox expects a unicode string.
            sandbox['LIBRARY_NAME'] = name.decode('utf-8')
            # gyp files contain headers and asm sources in sources lists.
            sources = set(mozpath.normpath(mozpath.join(sandbox['SRCDIR'], f))
                for f in spec.get('sources', [])
                if mozpath.splitext(f)[-1] != '.h')
            asm_sources = set(f for f in sources if f.endswith('.S'))

            unified_sources = sources - non_unified_sources - asm_sources
            sources -= unified_sources
            all_sources |= sources
            # The sandbox expects alphabetical order when adding sources
            sandbox['SOURCES'] = alphabetical_sorted(sources)
            sandbox['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            for define in target_conf.get('defines', []):
                if '=' in define:
                    name, value = define.split('=', 1)
                    sandbox['DEFINES'][name] = value
                else:
                    sandbox['DEFINES'][define] = True

            for include in target_conf.get('include_dirs', []):
                sandbox['LOCAL_INCLUDES'] += [include]

            with sandbox.allow_all_writes() as d:
                d['EXTRA_ASSEMBLER_FLAGS'] = target_conf.get('asflags_mozilla', [])
                d['EXTRA_COMPILE_FLAGS'] = target_conf.get('cflags_mozilla', [])
        else:
            # Ignore types other than static_library because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' % spec['type'])

        sandbox.execution_time = time.time() - time_start
        yield sandbox
        time_start = time.time()
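A standalone sketch of the per-target objdir derivation commented above (all paths and the target name are illustrative; posixpath stands in for mozpath):

# Sketch only: paths and target name are illustrative.
import posixpath
path = 'media/webrtc/trunk/webrtc.gyp'               # root gyp file
build_file = 'media/webrtc/trunk/modules/audio.gyp'  # current build file
output = 'obj/media/webrtc'
reldir = posixpath.relpath(posixpath.dirname(build_file),
                           posixpath.dirname(path))
subdir = '%s_%s' % (posixpath.splitext(posixpath.basename(build_file))[0],
                    'audio_device')
print(posixpath.join(output, reldir, subdir))
# obj/media/webrtc/modules/audio_audio_device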
Example #17
File: tup.py Project: dati91/gecko-dev
    def _handle_idl_manager(self, manager):
        if self.environment.is_artifact_build:
            return

        backend_file = self._get_backend_file('xpcom/xpidl')
        backend_file.export_shell()

        all_idl_directories = set()
        all_idl_directories.update(
            *map(lambda x: x[1], manager.modules.itervalues()))

        all_xpts = []
        for module, (idls, _) in sorted(manager.modules.iteritems()):
            cmd = [
                '$(PYTHON_PATH)',
                '$(PLY_INCLUDE)',
                '-I$(IDL_PARSER_DIR)',
                '-I$(IDL_PARSER_CACHE_DIR)',
                '$(topsrcdir)/python/mozbuild/mozbuild/action/xpidl-process.py',
                '--cache-dir',
                '$(IDL_PARSER_CACHE_DIR)',
                '--bindings-conf',
                '$(topsrcdir)/dom/bindings/Bindings.conf',
            ]

            for d in all_idl_directories:
                cmd.extend(['-I', d])

            cmd.extend([
                '$(DIST)/include',
                '$(DIST)/xpcrs',
                '.',
                module,
            ])
            cmd.extend(sorted(idls))

            all_xpts.append('$(MOZ_OBJ_ROOT)/%s/%s.xpt' %
                            (backend_file.relobjdir, module))
            outputs = ['%s.xpt' % module]
            stems = sorted(
                mozpath.splitext(mozpath.basename(idl))[0] for idl in idls)
            outputs.extend(
                ['$(MOZ_OBJ_ROOT)/dist/include/%s.h' % f for f in stems])
            outputs.extend(
                ['$(MOZ_OBJ_ROOT)/dist/xpcrs/rt/%s.rs' % f for f in stems])
            outputs.extend(
                ['$(MOZ_OBJ_ROOT)/dist/xpcrs/bt/%s.rs' % f for f in stems])
            backend_file.rule(
                inputs=[
                    '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidllex.py',
                    '$(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl/xpidlyacc.py',
                    self._installed_idls,
                ],
                display='XPIDL %s' % module,
                cmd=cmd,
                outputs=outputs,
                output_group=self._installed_files,
                check_unchanged=True,
            )

        cpp_backend_file = self._get_backend_file('xpcom/reflect/xptinfo')
        cpp_backend_file.export_shell()
        cpp_backend_file.rule(
            inputs=all_xpts,
            display='XPIDL xptcodegen.py %o',
            cmd=[
                '$(PYTHON_PATH)',
                '$(PLY_INCLUDE)',
                '$(topsrcdir)/xpcom/reflect/xptinfo/xptcodegen.py',
                '%o',
                '%f',
            ],
            outputs=['xptdata.cpp'],
            check_unchanged=True,
        )
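A standalone sketch of how the directory sets are collected above with update(*map(...)) (the registry shape shown is an assumption for illustration):

# Sketch only: the module -> (idls, directories) shape is assumed.
modules = {
    'xpcom_base': ({'nsIFoo.idl'}, {'xpcom/base'}),
    'necko': ({'nsIChannel.idl'}, {'netwerk/base'}),
}
all_dirs = set()
all_dirs.update(*map(lambda x: x[1], modules.values()))
print(sorted(all_dirs))  # ['netwerk/base', 'xpcom/base']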
Example #18
def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
                       non_unified_sources, action_overrides):
    flat_list, targets, data = gyp_result
    no_chromium = gyp_dir_attrs.no_chromium
    no_unified = gyp_dir_attrs.no_unified

    # Process all targets from the given gyp file and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets, path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them into subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(config, mozpath.relpath(
            mozpath.join(output, reldir, subdir), config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp is relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(mozpath.abspath(mozpath.join(
                mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if 'actions' in spec:
            handle_actions(spec['actions'], context, action_overrides)
        if 'copies' in spec:
            handle_copies(spec['copies'], context)

        use_libs = []
        libs = []
        def add_deps(s):
            for t in s.get('dependencies', []) + s.get('dependencies_original', []):
                ty = targets[t]['type']
                if ty in ('static_library', 'shared_library'):
                    use_libs.append(targets[t]['target_name'])
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ('static_library', 'none'):
                    add_deps(targets[t])
            libs.extend(spec.get('libraries', []))
        #XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        os_libs = []
        for l in libs:
            if l.startswith('-'):
                os_libs.append(l)
            elif l.endswith('.lib'):
                os_libs.append(l[:-4])
            elif l:
                # For library names passed in from moz.build.
                use_libs.append(os.path.basename(l))

        if spec['type'] == 'none':
            if not ('actions' in spec or 'copies' in spec):
                continue
        elif spec['type'] in ('static_library', 'shared_library', 'executable'):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if spec['type'] in ('static_library', 'shared_library'):
                if name.startswith('lib'):
                    name = name[3:]
                # The context expects a unicode string.
                context['LIBRARY_NAME'] = name.decode('utf-8')
            else:
                context['PROGRAM'] = name.decode('utf-8')
            if spec['type'] == 'shared_library':
                context['FORCE_SHARED_LIB'] = True
            elif spec['type'] == 'static_library' and spec.get('variables', {}).get('no_expand_libs', '0') == '1':
                # PSM links a NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context['NO_EXPAND_LIBS'] = True
            if use_libs:
                context['USE_LIBS'] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context['OS_LIBS'] = os_libs
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith('$INTERMEDIATE_DIR/'):
                    s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
                else:
                    s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext == '.def':
                    context['SYMBOLS_FILE'] = s
                elif ext != '.S' and not no_unified and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == '.s':
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            defines = target_conf.get('defines', [])
            if bool(config.substs['_MSC_VER']) and no_chromium:
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            product_dir_dist = '$PRODUCT_DIR/dist/'
            for include in target_conf.get('include_dirs', []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = '!/dist/include/' + include[len(product_dir_dist):]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = '!/' + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure
                    # they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    if include.startswith('/'):
                        resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
                    else:
                        resolved = mozpath.abspath(mozpath.join(mozpath.dirname(build_file), include))
                    if not os.path.exists(resolved):
                        continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            if use_defines_in_asflags and defines:
                context['ASFLAGS'] += ['-D' + d for d in defines]
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (
                    suffix_map[e]
                    for e in extensions if e in suffix_map
                )
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' % spec['type'])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context['LOCAL_INCLUDES'] += [
                '!/ipc/ipdl/_ipdlheaders',
                '/ipc/chromium/src',
                '/ipc/glue',
            ]
            # These get set via VC project file settings for normal GYP builds.
            if config.substs['OS_TARGET'] == 'WINNT':
                context['DEFINES']['UNICODE'] = True
                context['DEFINES']['_UNICODE'] = True
        context['DISABLE_STL_WRAPPING'] = True

        for key, value in gyp_dir_attrs.sandbox_vars.items():
            if context.get(key) and isinstance(context[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context[key] = value + context[key]
            else:
                context[key] = value

        yield context
Example #19
def canonical_suffix_for_file(f):
    return canonicalized_suffix_map[mozpath.splitext(f)[1]]
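A standalone sketch with an illustrative map; the suffix returned by splitext keeps its leading dot, which is what keys the map:

# Sketch only: the map is an illustrative subset.
import os.path
canonicalized_suffix_map = {'.cc': '.cpp', '.cxx': '.cpp', '.cpp': '.cpp'}
print(canonicalized_suffix_map[os.path.splitext('nsDocument.cc')[1]])  # .cpp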
Example #20
    def test_splitext(self):
        self.assertEqual(splitext(self.SEP.join(('foo', 'bar', 'baz.qux'))),
                         ('foo/bar/baz', '.qux'))
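The expected value encodes that splitext only splits the extension off the final path component; os.path.splitext agrees for '/'-separated paths:

# Standalone check mirroring the expectation above.
import os.path
assert os.path.splitext('foo/bar/baz.qux') == ('foo/bar/baz', '.qux')
assert os.path.splitext('foo/bar.baz/qux') == ('foo/bar.baz/qux', '')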
Example #21
    def emit_from_sandbox(self, sandbox):
        """Convert a MozbuildSandbox to tree metadata objects.

        This is a generator of mozbuild.frontend.data.SandboxDerived instances.
        """
        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_sandbox(sandbox):
            yield o

        for path in sandbox['CONFIGURE_SUBST_FILES']:
            yield self._create_substitution(ConfigFileSubstitution, sandbox,
                                            path)

        for path in sandbox['CONFIGURE_DEFINE_FILES']:
            yield self._create_substitution(HeaderFileSubstitution, sandbox,
                                            path)

        # XPIDL source files get processed and turned into .h and .xpt files.
        # If there are multiple XPIDL files in a directory, they get linked
        # together into a final .xpt, which has the name defined by
        # XPIDL_MODULE.
        xpidl_module = sandbox['XPIDL_MODULE']

        if sandbox['XPIDL_SOURCES'] and not xpidl_module:
            raise SandboxValidationError('XPIDL_MODULE must be defined if '
                                         'XPIDL_SOURCES is defined.')

        if xpidl_module and not sandbox['XPIDL_SOURCES']:
            raise SandboxValidationError('XPIDL_MODULE cannot be defined '
                                         'unless there are XPIDL_SOURCES: %s' %
                                         sandbox['RELATIVEDIR'])

        if sandbox['XPIDL_SOURCES'] and sandbox['NO_DIST_INSTALL']:
            self.log(
                logging.WARN, 'mozbuild_warning', dict(path=sandbox.main_path),
                '{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.')

        for idl in sandbox['XPIDL_SOURCES']:
            yield XPIDLFile(sandbox, mozpath.join(sandbox['SRCDIR'], idl),
                            xpidl_module)

        for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
            for src in (sandbox[symbol] or []):
                if not os.path.exists(mozpath.join(sandbox['SRCDIR'], src)):
                    raise SandboxValidationError(
                        'Reference to a file that '
                        'doesn\'t exist in %s (%s) in %s' %
                        (symbol, src, sandbox['RELATIVEDIR']))

        if sandbox.get('LIBXUL_LIBRARY') and sandbox.get('FORCE_STATIC_LIB'):
            raise SandboxValidationError(
                'LIBXUL_LIBRARY implies FORCE_STATIC_LIB')

        # Proxy some variables as-is until we have richer classes to represent
        # them. We should aim to keep this set small because it violates the
        # desired abstraction of the build definition away from makefiles.
        passthru = VariablePassthru(sandbox)
        varlist = [
            'ANDROID_GENERATED_RESFILES',
            'ANDROID_RES_DIRS',
            'CPP_UNIT_TESTS',
            'EXPORT_LIBRARY',
            'EXTRA_ASSEMBLER_FLAGS',
            'EXTRA_COMPILE_FLAGS',
            'EXTRA_COMPONENTS',
            'EXTRA_JS_MODULES',
            'EXTRA_PP_COMPONENTS',
            'EXTRA_PP_JS_MODULES',
            'FAIL_ON_WARNINGS',
            'FILES_PER_UNIFIED_FILE',
            'FORCE_SHARED_LIB',
            'FORCE_STATIC_LIB',
            'GENERATED_FILES',
            'HOST_LIBRARY_NAME',
            'IS_COMPONENT',
            'IS_GYP_DIR',
            'JS_MODULES_PATH',
            'LIBS',
            'LIBXUL_LIBRARY',
            'MSVC_ENABLE_PGO',
            'NO_DIST_INSTALL',
            'OS_LIBS',
            'SDK_LIBRARY',
        ]
        for v in varlist:
            if v in sandbox and sandbox[v]:
                passthru.variables[v] = sandbox[v]

        # NO_VISIBILITY_FLAGS is slightly different
        if sandbox['NO_VISIBILITY_FLAGS']:
            passthru.variables['VISIBILITY_FLAGS'] = ''

        varmap = dict(
            SOURCES={
                '.s': 'ASFILES',
                '.asm': 'ASFILES',
                '.c': 'CSRCS',
                '.m': 'CMSRCS',
                '.mm': 'CMMSRCS',
                '.cc': 'CPPSRCS',
                '.cpp': 'CPPSRCS',
                '.S': 'SSRCS',
            },
            HOST_SOURCES={
                '.c': 'HOST_CSRCS',
                '.mm': 'HOST_CMMSRCS',
                '.cc': 'HOST_CPPSRCS',
                '.cpp': 'HOST_CPPSRCS',
            },
            UNIFIED_SOURCES={
                '.c': 'UNIFIED_CSRCS',
                '.mm': 'UNIFIED_CMMSRCS',
                '.cc': 'UNIFIED_CPPSRCS',
                '.cpp': 'UNIFIED_CPPSRCS',
            })
        varmap.update(
            dict(('GENERATED_%s' % k, v) for k, v in varmap.items()
                 if k in ('SOURCES', 'UNIFIED_SOURCES')))
        for variable, mapping in varmap.items():
            for f in sandbox[variable]:
                ext = mozpath.splitext(f)[1]
                if ext not in mapping:
                    raise SandboxValidationError(
                        '%s has an unknown file type in %s' %
                        (f, sandbox['RELATIVEDIR']))
                l = passthru.variables.setdefault(mapping[ext], [])
                l.append(f)
                if variable.startswith('GENERATED_'):
                    l = passthru.variables.setdefault('GARBAGE', [])
                    l.append(f)

        no_pgo = sandbox.get('NO_PGO')
        sources = sandbox.get('SOURCES', [])
        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
        if no_pgo:
            if no_pgo_sources:
                raise SandboxValidationError(
                    'NO_PGO and SOURCES[...].no_pgo cannot be set at the same time'
                )
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
        if no_pgo_sources:
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources

        exports = sandbox.get('EXPORTS')
        if exports:
            yield Exports(
                sandbox,
                exports,
                dist_install=not sandbox.get('NO_DIST_INSTALL', False))

        defines = sandbox.get('DEFINES')
        if defines:
            yield Defines(sandbox, defines)

        program = sandbox.get('PROGRAM')
        if program:
            yield Program(sandbox, program, sandbox['CONFIG']['BIN_SUFFIX'])

        program = sandbox.get('HOST_PROGRAM')
        if program:
            yield HostProgram(sandbox, program,
                              sandbox['CONFIG']['HOST_BIN_SUFFIX'])

        for program in sandbox['SIMPLE_PROGRAMS']:
            yield SimpleProgram(sandbox, program,
                                sandbox['CONFIG']['BIN_SUFFIX'])

        for program in sandbox['HOST_SIMPLE_PROGRAMS']:
            yield HostSimpleProgram(sandbox, program,
                                    sandbox['CONFIG']['HOST_BIN_SUFFIX'])

        simple_lists = [
            ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile),
            ('GENERATED_WEBIDL_FILES', GeneratedWebIDLFile),
            ('IPDL_SOURCES', IPDLFile),
            ('LOCAL_INCLUDES', LocalInclude),
            ('GENERATED_INCLUDES', GeneratedInclude),
            ('PREPROCESSED_TEST_WEBIDL_FILES', PreprocessedTestWebIDLFile),
            ('PREPROCESSED_WEBIDL_FILES', PreprocessedWebIDLFile),
            ('TEST_WEBIDL_FILES', TestWebIDLFile),
            ('WEBIDL_FILES', WebIDLFile),
            ('WEBIDL_EXAMPLE_INTERFACES', ExampleWebIDLInterface),
        ]
        for sandbox_var, klass in simple_lists:
            for name in sandbox.get(sandbox_var, []):
                yield klass(sandbox, name)

        if sandbox.get('FINAL_TARGET') or sandbox.get('XPI_NAME') or \
                sandbox.get('DIST_SUBDIR'):
            yield InstallationTarget(sandbox)

        libname = sandbox.get('LIBRARY_NAME')
        final_lib = sandbox.get('FINAL_LIBRARY')
        if not libname and final_lib:
            # If no LIBRARY_NAME is given, create one.
            libname = sandbox['RELATIVEDIR'].replace('/', '_')
        if libname:
            self._libs.setdefault(libname, {})[sandbox['OBJDIR']] = \
                LibraryDefinition(sandbox, libname)

        if final_lib:
            if isinstance(sandbox,
                          MozbuildSandbox) and sandbox.get('FORCE_STATIC_LIB'):
                raise SandboxValidationError(
                    'FINAL_LIBRARY implies FORCE_STATIC_LIB')
            self._final_libs.append((sandbox['OBJDIR'], libname, final_lib))
            passthru.variables['FORCE_STATIC_LIB'] = True

        # While there are multiple test manifests, the behavior is very similar
        # across them. We enforce this by having common handling of all
        # manifests and outputting a single class type with the differences
        # described inside the instance.
        #
        # Keys are variable prefixes and values are tuples describing how these
        # manifests should be handled:
        #
        #    (flavor, install_prefix, active)
        #
        # flavor identifies the flavor of this test.
        # install_prefix is the path prefix of where to install the files in
        #     the tests directory.
        # active indicates whether to filter out inactive tests from the
        #     manifest.
        #
        # We ideally don't filter out inactive tests. However, not every test
        # harness can yet deal with test filtering. Once all harnesses can do
        # this, this feature can be dropped.
        test_manifests = dict(
            A11Y=('a11y', 'testing/mochitest/a11y', True),
            BROWSER_CHROME=('browser-chrome', 'testing/mochitest/browser',
                            True),
            METRO_CHROME=('metro-chrome', 'testing/mochitest/metro', True),
            MOCHITEST=('mochitest', 'testing/mochitest/tests', True),
            MOCHITEST_CHROME=('chrome', 'testing/mochitest/chrome', True),
            MOCHITEST_WEBAPPRT_CHROME=('webapprt-chrome',
                                       'testing/mochitest/webapprtChrome',
                                       True),
            WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', True),
            XPCSHELL_TESTS=('xpcshell', 'xpcshell', False),
        )

        for prefix, info in test_manifests.items():
            for path in sandbox.get('%s_MANIFESTS' % prefix, []):
                for obj in self._process_test_manifest(sandbox, info, path):
                    yield obj

        jar_manifests = sandbox.get('JAR_MANIFESTS', [])
        if len(jar_manifests) > 1:
            raise SandboxValidationError(
                'While JAR_MANIFESTS is a list, '
                'it is currently limited to one value.')

        for path in jar_manifests:
            yield JARManifest(sandbox, mozpath.join(sandbox['SRCDIR'], path))

        # Temporary test to look for jar.mn files that creep in without using
        # the new declaration. Before, we didn't require jar.mn files to be
        # declared anywhere (they were discovered). This will detect people
        # relying on the old behavior.
        if os.path.exists(os.path.join(sandbox['SRCDIR'], 'jar.mn')):
            if 'jar.mn' not in jar_manifests:
                raise SandboxValidationError(
                    'A jar.mn exists in %s but it '
                    'is not referenced in the corresponding moz.build file. '
                    'Please define JAR_MANIFESTS in the moz.build file.' %
                    sandbox['SRCDIR'])

        for name, jar in sandbox.get('JAVA_JAR_TARGETS', {}).items():
            yield SandboxWrapped(sandbox, jar)

        if passthru.variables:
            yield passthru
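A standalone sketch of the suffix-to-make-variable routing performed by the varmap loop above (file names and the mapping subset are illustrative):

# Sketch only: an illustrative subset of the SOURCES mapping above.
import os.path
mapping = {'.c': 'CSRCS', '.cc': 'CPPSRCS', '.cpp': 'CPPSRCS'}
variables = {}
for f in ['nsFoo.cpp', 'nsBar.cc', 'baz.c']:
    ext = os.path.splitext(f)[1]
    variables.setdefault(mapping[ext], []).append(f)
print(variables['CPPSRCS'])  # ['nsFoo.cpp', 'nsBar.cc']
print(variables['CSRCS'])    # ['baz.c']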
Example #22
File: data.py Project: isabella232/juggler
    def generated_events_stems(self):
        return [
            mozpath.splitext(b)[0] for b in self.generated_events_basenames()
        ]
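A standalone sketch with illustrative basenames:

# Sketch only: basenames are illustrative.
import os.path
basenames = ['BlobEvent.webidl', 'CloseEvent.webidl']
print([os.path.splitext(b)[0] for b in basenames])  # ['BlobEvent', 'CloseEvent']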
Example #23
    def emit_from_context(self, context):
        """Convert a Context to tree metadata objects.

        This is a generator of mozbuild.frontend.data.ContextDerived instances.
        """

        # We only want to emit an InstallationTarget if one of the consulted
        # variables is defined. Later on, we look up FINAL_TARGET, which has
        # the side-effect of populating it. So, we need to do this lookup
        # early.
        if any(k in context
               for k in ('FINAL_TARGET', 'XPI_NAME', 'DIST_SUBDIR')):
            yield InstallationTarget(context)

        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_context(context):
            yield o

        for path in context['CONFIGURE_SUBST_FILES']:
            yield self._create_substitution(ConfigFileSubstitution, context,
                                            path)

        for path in context['CONFIGURE_DEFINE_FILES']:
            yield self._create_substitution(HeaderFileSubstitution, context,
                                            path)

        # XPIDL source files get processed and turned into .h and .xpt files.
        # If there are multiple XPIDL files in a directory, they get linked
        # together into a final .xpt, which has the name defined by
        # XPIDL_MODULE.
        xpidl_module = context['XPIDL_MODULE']

        if context['XPIDL_SOURCES'] and not xpidl_module:
            raise SandboxValidationError(
                'XPIDL_MODULE must be defined if '
                'XPIDL_SOURCES is defined.', context)

        if xpidl_module and not context['XPIDL_SOURCES']:
            raise SandboxValidationError(
                'XPIDL_MODULE cannot be defined '
                'unless there are XPIDL_SOURCES', context)

        if context['XPIDL_SOURCES'] and context['NO_DIST_INSTALL']:
            self.log(
                logging.WARN, 'mozbuild_warning', dict(path=context.main_path),
                '{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.')

        for idl in context['XPIDL_SOURCES']:
            yield XPIDLFile(context, mozpath.join(context.srcdir, idl),
                            xpidl_module)

        for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
            for src in (context[symbol] or []):
                if not os.path.exists(mozpath.join(context.srcdir, src)):
                    raise SandboxValidationError(
                        'File listed in %s does not '
                        'exist: \'%s\'' % (symbol, src), context)

        # Proxy some variables as-is until we have richer classes to represent
        # them. We should aim to keep this set small because it violates the
        # desired abstraction of the build definition away from makefiles.
        passthru = VariablePassthru(context)
        varlist = [
            'ANDROID_GENERATED_RESFILES',
            'ANDROID_RES_DIRS',
            'DISABLE_STL_WRAPPING',
            'EXTRA_ASSEMBLER_FLAGS',
            'EXTRA_COMPILE_FLAGS',
            'EXTRA_COMPONENTS',
            'EXTRA_DSO_LDOPTS',
            'EXTRA_PP_COMPONENTS',
            'FAIL_ON_WARNINGS',
            'USE_STATIC_LIBS',
            'IS_GYP_DIR',
            'MSVC_ENABLE_PGO',
            'NO_DIST_INSTALL',
            'PYTHON_UNIT_TESTS',
            'RCFILE',
            'RESFILE',
            'RCINCLUDE',
            'DEFFILE',
            'WIN32_EXE_LDFLAGS',
            'LD_VERSION_SCRIPT',
        ]
        for v in varlist:
            if v in context and context[v]:
                passthru.variables[v] = context[v]

        if context.config.substs.get('OS_TARGET') == 'WINNT' and \
                context['DELAYLOAD_DLLS']:
            context['LDFLAGS'].extend([('-DELAYLOAD:%s' % dll)
                                       for dll in context['DELAYLOAD_DLLS']])
            context['OS_LIBS'].append('delayimp')

        for v in ['CFLAGS', 'CXXFLAGS', 'CMFLAGS', 'CMMFLAGS', 'LDFLAGS']:
            if v in context and context[v]:
                passthru.variables['MOZBUILD_' + v] = context[v]

        # NO_VISIBILITY_FLAGS is slightly different
        if context['NO_VISIBILITY_FLAGS']:
            passthru.variables['VISIBILITY_FLAGS'] = ''

        no_pgo = context.get('NO_PGO')
        sources = context.get('SOURCES', [])
        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
        if no_pgo:
            if no_pgo_sources:
                raise SandboxValidationError(
                    'NO_PGO and SOURCES[...].no_pgo '
                    'cannot be set at the same time', context)
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
        if no_pgo_sources:
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources

        # A map from "canonical suffixes" for a particular source file
        # language to the range of suffixes associated with that language.
        #
        # We deliberately don't list the canonical suffix in the suffix list
        # in the definition; we'll add it in programmatically after defining
        # things.
        suffix_map = {
            '.s': set(['.asm']),
            '.c': set(),
            '.m': set(),
            '.mm': set(),
            '.cpp': set(['.cc', '.cxx']),
            '.S': set(),
        }

        # The inverse of the above, mapping suffixes to their canonical suffix.
        canonicalized_suffix_map = {}
        for suffix, alternatives in suffix_map.iteritems():
            alternatives.add(suffix)
            for a in alternatives:
                canonicalized_suffix_map[a] = suffix

        def canonical_suffix_for_file(f):
            return canonicalized_suffix_map[mozpath.splitext(f)[1]]

        # A map from moz.build variables to the canonical suffixes of file
        # kinds that can be listed therein.
        all_suffixes = list(suffix_map.keys())
        varmap = dict(
            SOURCES=(Sources, all_suffixes),
            HOST_SOURCES=(HostSources, ['.c', '.mm', '.cpp']),
            UNIFIED_SOURCES=(UnifiedSources, ['.c', '.mm', '.cpp']),
            GENERATED_SOURCES=(GeneratedSources, all_suffixes),
        )

        for variable, (klass, suffixes) in varmap.items():
            allowed_suffixes = set().union(*[suffix_map[s] for s in suffixes])

            # First ensure that we haven't been given filetypes that we don't
            # recognize.
            for f in context[variable]:
                ext = mozpath.splitext(f)[1]
                if ext not in allowed_suffixes:
                    raise SandboxValidationError(
                        '%s has an unknown file type.' % f, context)
                if variable.startswith('GENERATED_'):
                    l = passthru.variables.setdefault('GARBAGE', [])
                    l.append(f)

            # Now sort the files to let groupby work.
            sorted_files = sorted(context[variable],
                                  key=canonical_suffix_for_file)
            for canonical_suffix, files in itertools.groupby(
                    sorted_files, canonical_suffix_for_file):
                arglist = [context, list(files), canonical_suffix]
                if variable.startswith(
                        'UNIFIED_') and 'FILES_PER_UNIFIED_FILE' in context:
                    arglist.append(context['FILES_PER_UNIFIED_FILE'])
                yield klass(*arglist)

        sources_with_flags = [f for f in sources if sources[f].flags]
        for f in sources_with_flags:
            ext = mozpath.splitext(f)[1]
            yield PerSourceFlag(context, f, sources[f].flags)

        exports = context.get('EXPORTS')
        if exports:
            yield Exports(
                context,
                exports,
                dist_install=not context.get('NO_DIST_INSTALL', False))

        generated_files = context.get('GENERATED_FILES')
        if generated_files:
            for f in generated_files:
                flags = generated_files[f]
                output = f
                if flags.script:
                    script = mozpath.join(context.srcdir, flags.script)
                    inputs = [
                        mozpath.join(context.srcdir, i) for i in flags.inputs
                    ]

                    if not os.path.exists(script):
                        raise SandboxValidationError(
                            'Script for generating %s does not exist: %s' %
                            (f, script), context)
                    if os.path.splitext(script)[1] != '.py':
                        raise SandboxValidationError(
                            'Script for generating %s does not end in .py: %s'
                            % (f, script), context)
                    for i in inputs:
                        if not os.path.exists(i):
                            raise SandboxValidationError(
                                'Input for generating %s does not exist: %s' %
                                (f, i), context)
                else:
                    script = None
                    inputs = []
                yield GeneratedFile(context, script, output, inputs)

        test_harness_files = context.get('TEST_HARNESS_FILES')
        if test_harness_files:
            srcdir_files = defaultdict(list)
            srcdir_pattern_files = defaultdict(list)
            objdir_files = defaultdict(list)

            for path, strings in test_harness_files.walk():
                if not path and strings:
                    raise SandboxValidationError(
                        'Cannot install files to the root of TEST_HARNESS_FILES',
                        context)

                for s in strings:
                    if context.is_objdir_path(s):
                        if s.startswith('!/'):
                            objdir_files[path].append('$(DEPTH)/%s' % s[2:])
                        else:
                            objdir_files[path].append(s[1:])
                    else:
                        resolved = context.resolve_path(s)
                        if '*' in s:
                            srcdir_pattern_files[path].append(s)
                        elif not os.path.exists(resolved):
                            raise SandboxValidationError(
                                'File listed in TEST_HARNESS_FILES does not exist: %s'
                                % s, context)
                        else:
                            srcdir_files[path].append(resolved)

            yield TestHarnessFiles(context, srcdir_files, srcdir_pattern_files,
                                   objdir_files)

        defines = context.get('DEFINES')
        if defines:
            yield Defines(context, defines)

        resources = context.get('RESOURCE_FILES')
        if resources:
            yield Resources(context, resources, defines)

        for pref in sorted(context['JS_PREFERENCE_FILES']):
            yield JsPreferenceFile(context, pref)

        for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]:
            program = context.get(kind)
            if program:
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" as %s name, '
                        'because it is already used in %s' %
                        (program, kind, self._binaries[program].relativedir),
                        context)
                self._binaries[program] = cls(context, program)
                self._linkage.append((context, self._binaries[program],
                                      kind.replace('PROGRAM', 'USE_LIBS')))

        for kind, cls in [('SIMPLE_PROGRAMS', SimpleProgram),
                          ('CPP_UNIT_TESTS', SimpleProgram),
                          ('HOST_SIMPLE_PROGRAMS', HostSimpleProgram)]:
            for program in context[kind]:
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" in %s, '
                        'because it is already used in %s' %
                        (program, kind, self._binaries[program].relativedir),
                        context)
                self._binaries[program] = cls(
                    context, program, is_unit_test=kind == 'CPP_UNIT_TESTS')
                self._linkage.append(
                    (context, self._binaries[program], 'HOST_USE_LIBS'
                     if kind == 'HOST_SIMPLE_PROGRAMS' else 'USE_LIBS'))

        extra_js_modules = context.get('EXTRA_JS_MODULES')
        if extra_js_modules:
            yield JavaScriptModules(context, extra_js_modules, 'extra')

        extra_pp_js_modules = context.get('EXTRA_PP_JS_MODULES')
        if extra_pp_js_modules:
            yield JavaScriptModules(context, extra_pp_js_modules, 'extra_pp')

        test_js_modules = context.get('TESTING_JS_MODULES')
        if test_js_modules:
            yield JavaScriptModules(context, test_js_modules, 'testing')

        simple_lists = [
            ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile),
            ('GENERATED_WEBIDL_FILES', GeneratedWebIDLFile),
            ('IPDL_SOURCES', IPDLFile),
            ('GENERATED_INCLUDES', GeneratedInclude),
            ('PREPROCESSED_TEST_WEBIDL_FILES', PreprocessedTestWebIDLFile),
            ('PREPROCESSED_WEBIDL_FILES', PreprocessedWebIDLFile),
            ('TEST_WEBIDL_FILES', TestWebIDLFile),
            ('WEBIDL_FILES', WebIDLFile),
            ('WEBIDL_EXAMPLE_INTERFACES', ExampleWebIDLInterface),
        ]
        for context_var, klass in simple_lists:
            for name in context.get(context_var, []):
                yield klass(context, name)

        for local_include in context.get('LOCAL_INCLUDES', []):
            if local_include.startswith('/'):
                path = context.config.topsrcdir
                relative_include = local_include[1:]
            else:
                path = context.srcdir
                relative_include = local_include

            actual_include = os.path.join(path, relative_include)
            if not os.path.exists(actual_include):
                raise SandboxValidationError(
                    'Path specified in LOCAL_INCLUDES '
                    'does not exist: %s (resolved to %s)' %
                    (local_include, actual_include), context)
            yield LocalInclude(context, local_include)

        final_target_files = context.get('FINAL_TARGET_FILES')
        if final_target_files:
            yield FinalTargetFiles(context, final_target_files,
                                   context['FINAL_TARGET'])

        branding_files = context.get('BRANDING_FILES')
        if branding_files:
            yield BrandingFiles(context, branding_files)

        host_libname = context.get('HOST_LIBRARY_NAME')
        libname = context.get('LIBRARY_NAME')

        if host_libname:
            if host_libname == libname:
                raise SandboxValidationError(
                    'LIBRARY_NAME and '
                    'HOST_LIBRARY_NAME must have a different value', context)
            lib = HostLibrary(context, host_libname)
            self._libs[host_libname].append(lib)
            self._linkage.append((context, lib, 'HOST_USE_LIBS'))

        final_lib = context.get('FINAL_LIBRARY')
        if not libname and final_lib:
            # If no LIBRARY_NAME is given, create one.
            libname = context.relsrcdir.replace('/', '_')

        static_lib = context.get('FORCE_STATIC_LIB')
        shared_lib = context.get('FORCE_SHARED_LIB')

        static_name = context.get('STATIC_LIBRARY_NAME')
        shared_name = context.get('SHARED_LIBRARY_NAME')

        is_framework = context.get('IS_FRAMEWORK')
        is_component = context.get('IS_COMPONENT')

        soname = context.get('SONAME')

        lib_defines = context.get('LIBRARY_DEFINES')

        shared_args = {}
        static_args = {}

        if final_lib:
            if static_lib:
                raise SandboxValidationError(
                    'FINAL_LIBRARY implies FORCE_STATIC_LIB. '
                    'Please remove the latter.', context)
            if shared_lib:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. '
                    'Please remove one.', context)
            if is_framework:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with IS_FRAMEWORK. '
                    'Please remove one.', context)
            if is_component:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with IS_COMPONENT. '
                    'Please remove one.', context)
            static_args['link_into'] = final_lib
            static_lib = True

        if libname:
            if is_component:
                if static_lib:
                    raise SandboxValidationError(
                        'IS_COMPONENT conflicts with FORCE_STATIC_LIB. '
                        'Please remove one.', context)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.COMPONENT

            if is_framework:
                if soname:
                    raise SandboxValidationError(
                        'IS_FRAMEWORK conflicts with SONAME. '
                        'Please remove one.', context)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.FRAMEWORK

            if not static_lib and not shared_lib:
                static_lib = True

            if static_name:
                if not static_lib:
                    raise SandboxValidationError(
                        'STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB',
                        context)
                static_args['real_name'] = static_name

            if shared_name:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB',
                        context)
                shared_args['real_name'] = shared_name

            if soname:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SONAME requires FORCE_SHARED_LIB', context)
                shared_args['soname'] = soname

            # If both a shared and a static library are created, only the
            # shared library is meant to be an SDK library.
            if context.get('SDK_LIBRARY'):
                if shared_lib:
                    shared_args['is_sdk'] = True
                elif static_lib:
                    static_args['is_sdk'] = True

            if shared_lib and static_lib:
                if not static_name and not shared_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but neither STATIC_LIBRARY_NAME or '
                        'SHARED_LIBRARY_NAME is set. At least one is required.',
                        context)
                if static_name and not shared_name and static_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, '
                        'and SHARED_LIBRARY_NAME is unset. Please either '
                        'change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set '
                        'SHARED_LIBRARY_NAME.', context)
                if shared_name and not static_name and shared_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, '
                        'and STATIC_LIBRARY_NAME is unset. Please either '
                        'change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set '
                        'STATIC_LIBRARY_NAME.', context)
                if shared_name and static_name and shared_name == static_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but SHARED_LIBRARY_NAME is the same as '
                        'STATIC_LIBRARY_NAME. Please change one of them.',
                        context)

            if shared_lib:
                lib = SharedLibrary(context, libname, **shared_args)
                self._libs[libname].append(lib)
                self._linkage.append((context, lib, 'USE_LIBS'))
            if static_lib:
                lib = StaticLibrary(context, libname, **static_args)
                self._libs[libname].append(lib)
                self._linkage.append((context, lib, 'USE_LIBS'))

        if lib_defines:
            if not libname:
                raise SandboxValidationError(
                    'LIBRARY_DEFINES needs a '
                    'LIBRARY_NAME to take effect', context)
            lib.defines.update(lib_defines)

        # While there are multiple test manifests, the behavior is very similar
        # across them. We enforce this by having common handling of all
        # manifests and outputting a single class type with the differences
        # described inside the instance.
        #
        # Keys are variable prefixes and values are tuples describing how these
        # manifests should be handled:
        #
        #    (flavor, install_root, install_subdir, package_tests)
        #
        # flavor identifies the flavor of this test.
        # install_root is the path prefix of where to install the files in
        #     the tests directory.
        # install_subdir is the path below install_root where the files for
        #     this manifest are installed.
        # package_tests indicates whether to package test files into the test
        #     package; suites that compile the test files should not install
        #     them into the test package.
        #
        test_manifests = dict(
            A11Y=('a11y', 'testing/mochitest', 'a11y', True),
            BROWSER_CHROME=('browser-chrome', 'testing/mochitest', 'browser',
                            True),
            ANDROID_INSTRUMENTATION=('instrumentation', 'instrumentation', '.',
                                     False),
            JETPACK_PACKAGE=('jetpack-package', 'testing/mochitest',
                             'jetpack-package', True),
            JETPACK_ADDON=('jetpack-addon', 'testing/mochitest',
                           'jetpack-addon', False),
            METRO_CHROME=('metro-chrome', 'testing/mochitest', 'metro', True),
            MOCHITEST=('mochitest', 'testing/mochitest', 'tests', True),
            MOCHITEST_CHROME=('chrome', 'testing/mochitest', 'chrome', True),
            WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', '.', True),
            XPCSHELL_TESTS=('xpcshell', 'xpcshell', '.', True),
        )

        for prefix, info in test_manifests.items():
            for path in context.get('%s_MANIFESTS' % prefix, []):
                for obj in self._process_test_manifest(context, info, path):
                    yield obj

        for flavor in ('crashtest', 'reftest'):
            for path in context.get('%s_MANIFESTS' % flavor.upper(), []):
                for obj in self._process_reftest_manifest(
                        context, flavor, path):
                    yield obj

        jar_manifests = context.get('JAR_MANIFESTS', [])
        if len(jar_manifests) > 1:
            raise SandboxValidationError(
                'While JAR_MANIFESTS is a list, '
                'it is currently limited to one value.', context)

        for path in jar_manifests:
            yield JARManifest(context, mozpath.join(context.srcdir, path))

        # Temporary test to look for jar.mn files that creep in without using
        # the new declaration. Before, we didn't require jar.mn files to be
        # declared anywhere (they were discovered). This will detect people
        # relying on the old behavior.
        if os.path.exists(os.path.join(context.srcdir, 'jar.mn')):
            if 'jar.mn' not in jar_manifests:
                raise SandboxValidationError(
                    'A jar.mn exists but it '
                    'is not referenced in the moz.build file. '
                    'Please define JAR_MANIFESTS.', context)

        for name, jar in context.get('JAVA_JAR_TARGETS', {}).items():
            yield ContextWrapped(context, jar)

        for name, data in context.get('ANDROID_ECLIPSE_PROJECT_TARGETS',
                                      {}).items():
            yield ContextWrapped(context, data)

        if passthru.variables:
            yield passthru
Example #24
    def _process_sources(self, context, passthru):
        for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
            for src in (context[symbol] or []):
                if not os.path.exists(mozpath.join(context.srcdir, src)):
                    raise SandboxValidationError('File listed in %s does not '
                        'exist: \'%s\'' % (symbol, src), context)

        no_pgo = context.get('NO_PGO')
        sources = context.get('SOURCES', [])
        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
        if no_pgo:
            if no_pgo_sources:
                raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
                    'cannot be set at the same time', context)
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
        if no_pgo_sources:
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources

        # A map from "canonical suffixes" for a particular source file
        # language to the range of suffixes associated with that language.
        #
        # We deliberately don't list the canonical suffix in the suffix list
        # in the definition; we'll add it in programmatically after defining
        # things.
        suffix_map = {
            '.s': set(['.asm']),
            '.c': set(),
            '.m': set(),
            '.mm': set(),
            '.cpp': set(['.cc', '.cxx']),
            '.S': set(),
        }

        # The inverse of the above, mapping suffixes to their canonical suffix.
        canonicalized_suffix_map = {}
        for suffix, alternatives in suffix_map.iteritems():
            alternatives.add(suffix)
            for a in alternatives:
                canonicalized_suffix_map[a] = suffix

        def canonical_suffix_for_file(f):
            return canonicalized_suffix_map[mozpath.splitext(f)[1]]

        # A map from moz.build variables to the canonical suffixes of file
        # kinds that can be listed therein.
        all_suffixes = list(suffix_map.keys())
        varmap = dict(
            SOURCES=(Sources, all_suffixes),
            HOST_SOURCES=(HostSources, ['.c', '.mm', '.cpp']),
            UNIFIED_SOURCES=(UnifiedSources, ['.c', '.mm', '.cpp']),
            GENERATED_SOURCES=(GeneratedSources, all_suffixes),
        )

        for variable, (klass, suffixes) in varmap.items():
            allowed_suffixes = set().union(*[suffix_map[s] for s in suffixes])

            # First ensure that we haven't been given filetypes that we don't
            # recognize.
            for f in context[variable]:
                ext = mozpath.splitext(f)[1]
                if ext not in allowed_suffixes:
                    raise SandboxValidationError(
                        '%s has an unknown file type.' % f, context)
                if variable.startswith('GENERATED_'):
                    l = passthru.variables.setdefault('GARBAGE', [])
                    l.append(f)

            # Now sort the files to let groupby work.
            sorted_files = sorted(context[variable], key=canonical_suffix_for_file)
            for canonical_suffix, files in itertools.groupby(sorted_files, canonical_suffix_for_file):
                arglist = [context, list(files), canonical_suffix]
                if variable.startswith('UNIFIED_') and 'FILES_PER_UNIFIED_FILE' in context:
                    arglist.append(context['FILES_PER_UNIFIED_FILE'])
                yield klass(*arglist)

        sources_with_flags = [f for f in sources if sources[f].flags]
        for f in sources_with_flags:
            ext = mozpath.splitext(f)[1]
            yield PerSourceFlag(context, f, sources[f].flags)
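The canonicalization and grouping logic above is easy to exercise on its own. Below is a minimal standalone sketch of the same technique, assuming plain file-name strings and using os.path.splitext in place of mozpath.splitext; note that itertools.groupby only groups adjacent items, which is why the files are sorted by the same key first.

import itertools
import os

# Map each canonical suffix to its accepted alternatives, then invert it so
# any suffix can be looked up to find its canonical form.
suffix_map = {
    '.s': {'.asm'},
    '.c': set(),
    '.m': set(),
    '.mm': set(),
    '.cpp': {'.cc', '.cxx'},
    '.S': set(),
}
canonicalized_suffix_map = {}
for suffix, alternatives in suffix_map.items():
    for a in alternatives | {suffix}:  # a canonical suffix maps to itself
        canonicalized_suffix_map[a] = suffix

def canonical_suffix_for_file(f):
    return canonicalized_suffix_map[os.path.splitext(f)[1]]

files = ['a.cc', 'b.cpp', 'c.asm', 'd.c', 'e.cxx']
for canonical, group in itertools.groupby(
        sorted(files, key=canonical_suffix_for_file),
        canonical_suffix_for_file):
    print(canonical, list(group))
# .c ['d.c']
# .cpp ['a.cc', 'b.cpp', 'e.cxx']
# .s ['c.asm']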
Example #25
    def _process_webidl_basename(self, basename):
        header = 'mozilla/dom/%sBinding.h' % mozpath.splitext(basename)[0]
        self._install_manifests['dist_include'].add_optional_exists(header)
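As a usage note for the helper above: the stem of the IDL basename becomes the generated binding header path under mozilla/dom/. A minimal sketch of that naming rule, using os.path.splitext as a stand-in for mozpath.splitext and a hypothetical input file:

import os

def binding_header_for(basename):
    # 'Event.webidl' -> 'mozilla/dom/EventBinding.h'
    return 'mozilla/dom/%sBinding.h' % os.path.splitext(basename)[0]

assert binding_header_for('Event.webidl') == 'mozilla/dom/EventBinding.h'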
Example #27
def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
    """Reads a gyp configuration and emits GypContexts for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp configuration
    file, output is the base path under which the objdir for the various gyp
    dependencies will be, and vars is a dict of variables to pass to the gyp
    processor.
    """

    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())

    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
    }

    # Files that gyp_chromium always includes
    includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
    finder = FileFinder(chrome_src, find_executables=False)
    includes.extend(encode(mozpath.join(chrome_src, name))
        for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
            default_variables=str_vars,
            includes=includes,
            depth=encode(mozpath.dirname(path)),
            params=params)

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets, path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(config, mozpath.relpath(
            mozpath.join(output, reldir, subdir), config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp is relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(mozpath.abspath(mozpath.join(
                mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if spec['type'] == 'none':
            continue
        elif spec['type'] == 'static_library':
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if name.startswith('lib'):
                name = name[3:]
            # The context expects a unicode string.
            context['LIBRARY_NAME'] = name.decode('utf-8')
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext != '.S' and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            for define in target_conf.get('defines', []):
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            for include in target_conf.get('include_dirs', []):
                # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                #
                # NB: gyp files sometimes have actual absolute paths (e.g.
                # /usr/include32) and sometimes paths that moz.build considers
                # absolute, i.e. starting from topsrcdir. There's no good way
                # to tell them apart here, and the actual absolute paths are
                # likely bogus. In any event, actual absolute paths will be
                # filtered out by trying to find them in topsrcdir.
                if include.startswith('/'):
                    resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
                else:
                    resolved = mozpath.abspath(mozpath.join(mozpath.dirname(build_file), include))
                if not os.path.exists(resolved):
                    continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (
                    suffix_map[e]
                    for e in extensions if e in suffix_map
                )
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs)
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore types other than static_library because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' % spec['type'])

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context['LOCAL_INCLUDES'] += [
            '!/ipc/ipdl/_ipdlheaders',
            '/ipc/chromium/src',
            '/ipc/glue',
        ]
        # These get set via VC project file settings for normal GYP builds.
        if config.substs['OS_TARGET'] == 'WINNT':
            context['DEFINES']['UNICODE'] = True
            context['DEFINES']['_UNICODE'] = True
        context['DISABLE_STL_WRAPPING'] = True

        yield context
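The per-target objdir naming used by read_from_gyp can be illustrated in isolation. This is a minimal sketch under assumed, hypothetical paths; posixpath stands in for mozpath, which likewise uses forward-slash semantics:

import posixpath

def gyp_target_objdir(output, root_gyp, build_file, target_name):
    # Base the objdir on the build file's directory relative to the root gyp
    # file, then disambiguate multiple targets per build file with a
    # '<build file stem>_<target name>' subdirectory.
    reldir = posixpath.relpath(posixpath.dirname(build_file),
                               posixpath.dirname(root_gyp))
    stem = posixpath.splitext(posixpath.basename(build_file))[0]
    return posixpath.join(output, reldir, '%s_%s' % (stem, target_name))

print(gyp_target_objdir('obj/media', 'media/trunk/all.gyp',
                        'media/trunk/base/base.gyp', 'base_lib'))
# obj/media/base/base_base_lib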
Example #28
    def emit_from_context(self, context):
        """Convert a Context to tree metadata objects.

        This is a generator of mozbuild.frontend.data.ContextDerived instances.
        """
        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_context(context):
            yield o

        for path in context['CONFIGURE_SUBST_FILES']:
            yield self._create_substitution(ConfigFileSubstitution, context,
                path)

        for path in context['CONFIGURE_DEFINE_FILES']:
            yield self._create_substitution(HeaderFileSubstitution, context,
                path)

        # XPIDL source files get processed and turned into .h and .xpt files.
        # If there are multiple XPIDL files in a directory, they get linked
        # together into a final .xpt, which has the name defined by
        # XPIDL_MODULE.
        xpidl_module = context['XPIDL_MODULE']

        if context['XPIDL_SOURCES'] and not xpidl_module:
            raise SandboxValidationError('XPIDL_MODULE must be defined if '
                'XPIDL_SOURCES is defined.', context)

        if xpidl_module and not context['XPIDL_SOURCES']:
            raise SandboxValidationError('XPIDL_MODULE cannot be defined '
                'unless there are XPIDL_SOURCES', context)

        if context['XPIDL_SOURCES'] and context['NO_DIST_INSTALL']:
            self.log(logging.WARN, 'mozbuild_warning', dict(
                path=context.main_path),
                '{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.')

        for idl in context['XPIDL_SOURCES']:
            yield XPIDLFile(context, mozpath.join(context.srcdir, idl),
                xpidl_module)

        for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
            for src in (context[symbol] or []):
                if not os.path.exists(mozpath.join(context.srcdir, src)):
                    raise SandboxValidationError('File listed in %s does not '
                        'exist: \'%s\'' % (symbol, src), context)

        # Proxy some variables as-is until we have richer classes to represent
        # them. We should aim to keep this set small because it violates the
        # desired abstraction of the build definition away from makefiles.
        passthru = VariablePassthru(context)
        varlist = [
            'ANDROID_GENERATED_RESFILES',
            'ANDROID_RES_DIRS',
            'DISABLE_STL_WRAPPING',
            'EXTRA_ASSEMBLER_FLAGS',
            'EXTRA_COMPILE_FLAGS',
            'EXTRA_COMPONENTS',
            'EXTRA_DSO_LDOPTS',
            'EXTRA_PP_COMPONENTS',
            'FAIL_ON_WARNINGS',
            'FILES_PER_UNIFIED_FILE',
            'USE_STATIC_LIBS',
            'GENERATED_FILES',
            'IS_GYP_DIR',
            'MSVC_ENABLE_PGO',
            'NO_DIST_INSTALL',
            'PYTHON_UNIT_TESTS',
            'RCFILE',
            'RESFILE',
            'RCINCLUDE',
            'DEFFILE',
            'WIN32_EXE_LDFLAGS',
            'LD_VERSION_SCRIPT',
        ]
        for v in varlist:
            if v in context and context[v]:
                passthru.variables[v] = context[v]

        for v in ['CFLAGS', 'CXXFLAGS', 'CMFLAGS', 'CMMFLAGS', 'LDFLAGS']:
            if v in context and context[v]:
                passthru.variables['MOZBUILD_' + v] = context[v]

        # NO_VISIBILITY_FLAGS is slightly different
        if context['NO_VISIBILITY_FLAGS']:
            passthru.variables['VISIBILITY_FLAGS'] = ''

        if context['DELAYLOAD_DLLS']:
            passthru.variables['DELAYLOAD_LDFLAGS'] = [('-DELAYLOAD:%s' % dll)
                for dll in context['DELAYLOAD_DLLS']]
            passthru.variables['USE_DELAYIMP'] = True

        varmap = dict(
            SOURCES={
                '.s': 'ASFILES',
                '.asm': 'ASFILES',
                '.c': 'CSRCS',
                '.m': 'CMSRCS',
                '.mm': 'CMMSRCS',
                '.cc': 'CPPSRCS',
                '.cpp': 'CPPSRCS',
                '.cxx': 'CPPSRCS',
                '.S': 'SSRCS',
            },
            HOST_SOURCES={
                '.c': 'HOST_CSRCS',
                '.mm': 'HOST_CMMSRCS',
                '.cc': 'HOST_CPPSRCS',
                '.cpp': 'HOST_CPPSRCS',
                '.cxx': 'HOST_CPPSRCS',
            },
            UNIFIED_SOURCES={
                '.c': 'UNIFIED_CSRCS',
                '.mm': 'UNIFIED_CMMSRCS',
                '.cc': 'UNIFIED_CPPSRCS',
                '.cpp': 'UNIFIED_CPPSRCS',
                '.cxx': 'UNIFIED_CPPSRCS',
            }
        )
        varmap.update(dict(('GENERATED_%s' % k, v) for k, v in varmap.items()
                           if k in ('SOURCES', 'UNIFIED_SOURCES')))
        for variable, mapping in varmap.items():
            for f in context[variable]:
                ext = mozpath.splitext(f)[1]
                if ext not in mapping:
                    raise SandboxValidationError(
                        '%s has an unknown file type.' % f, context)
                l = passthru.variables.setdefault(mapping[ext], [])
                l.append(f)
                if variable.startswith('GENERATED_'):
                    l = passthru.variables.setdefault('GARBAGE', [])
                    l.append(f)

        no_pgo = context.get('NO_PGO')
        sources = context.get('SOURCES', [])
        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
        if no_pgo:
            if no_pgo_sources:
                raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
                    'cannot be set at the same time', context)
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
        if no_pgo_sources:
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources

        sources_with_flags = [f for f in sources if sources[f].flags]
        for f in sources_with_flags:
            ext = mozpath.splitext(f)[1]
            yield PerSourceFlag(context, f, sources[f].flags)

        exports = context.get('EXPORTS')
        if exports:
            yield Exports(context, exports,
                dist_install=not context.get('NO_DIST_INSTALL', False))

        defines = context.get('DEFINES')
        if defines:
            yield Defines(context, defines)

        resources = context.get('RESOURCE_FILES')
        if resources:
            yield Resources(context, resources, defines)

        for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]:
            program = context.get(kind)
            if program:
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" as %s name, '
                        'because it is already used in %s' % (program, kind,
                        self._binaries[program].relativedir), context)
                self._binaries[program] = cls(context, program)
                self._linkage.append((context, self._binaries[program],
                    kind.replace('PROGRAM', 'USE_LIBS')))

        for kind, cls in [
                ('SIMPLE_PROGRAMS', SimpleProgram),
                ('CPP_UNIT_TESTS', SimpleProgram),
                ('HOST_SIMPLE_PROGRAMS', HostSimpleProgram)]:
            for program in context[kind]:
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" in %s, '
                        'because it is already used in %s' % (program, kind,
                        self._binaries[program].relativedir), context)
                self._binaries[program] = cls(context, program,
                    is_unit_test=kind == 'CPP_UNIT_TESTS')
                self._linkage.append((context, self._binaries[program],
                    'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
                    else 'USE_LIBS'))

        extra_js_modules = context.get('EXTRA_JS_MODULES')
        if extra_js_modules:
            yield JavaScriptModules(context, extra_js_modules, 'extra')

        extra_pp_js_modules = context.get('EXTRA_PP_JS_MODULES')
        if extra_pp_js_modules:
            yield JavaScriptModules(context, extra_pp_js_modules, 'extra_pp')

        test_js_modules = context.get('TESTING_JS_MODULES')
        if test_js_modules:
            yield JavaScriptModules(context, test_js_modules, 'testing')

        simple_lists = [
            ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile),
            ('GENERATED_WEBIDL_FILES', GeneratedWebIDLFile),
            ('IPDL_SOURCES', IPDLFile),
            ('GENERATED_INCLUDES', GeneratedInclude),
            ('PREPROCESSED_TEST_WEBIDL_FILES', PreprocessedTestWebIDLFile),
            ('PREPROCESSED_WEBIDL_FILES', PreprocessedWebIDLFile),
            ('TEST_WEBIDL_FILES', TestWebIDLFile),
            ('WEBIDL_FILES', WebIDLFile),
            ('WEBIDL_EXAMPLE_INTERFACES', ExampleWebIDLInterface),
        ]
        for context_var, klass in simple_lists:
            for name in context.get(context_var, []):
                yield klass(context, name)

        for local_include in context.get('LOCAL_INCLUDES', []):
            if local_include.startswith('/'):
                path = context.config.topsrcdir
                relative_include = local_include[1:]
            else:
                path = context.srcdir
                relative_include = local_include

            actual_include = os.path.join(path, relative_include)
            if not os.path.exists(actual_include):
                raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
                    'does not exist: %s (resolved to %s)' % (local_include, actual_include), context)
            yield LocalInclude(context, local_include)

        if context.get('FINAL_TARGET') or context.get('XPI_NAME') or \
                context.get('DIST_SUBDIR'):
            yield InstallationTarget(context)

        host_libname = context.get('HOST_LIBRARY_NAME')
        libname = context.get('LIBRARY_NAME')

        if host_libname:
            if host_libname == libname:
                raise SandboxValidationError('LIBRARY_NAME and '
                    'HOST_LIBRARY_NAME must have a different value', context)
            lib = HostLibrary(context, host_libname)
            self._libs[host_libname].append(lib)
            self._linkage.append((context, lib, 'HOST_USE_LIBS'))

        final_lib = context.get('FINAL_LIBRARY')
        if not libname and final_lib:
            # If no LIBRARY_NAME is given, create one.
            libname = context.relsrcdir.replace('/', '_')

        static_lib = context.get('FORCE_STATIC_LIB')
        shared_lib = context.get('FORCE_SHARED_LIB')

        static_name = context.get('STATIC_LIBRARY_NAME')
        shared_name = context.get('SHARED_LIBRARY_NAME')

        is_framework = context.get('IS_FRAMEWORK')
        is_component = context.get('IS_COMPONENT')

        soname = context.get('SONAME')

        shared_args = {}
        static_args = {}

        if final_lib:
            if static_lib:
                raise SandboxValidationError(
                    'FINAL_LIBRARY implies FORCE_STATIC_LIB. '
                    'Please remove the latter.', context)
            if shared_lib:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. '
                    'Please remove one.', context)
            if is_framework:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with IS_FRAMEWORK. '
                    'Please remove one.', context)
            if is_component:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with IS_COMPONENT. '
                    'Please remove one.', context)
            static_args['link_into'] = final_lib
            static_lib = True

        if libname:
            if is_component:
                if shared_lib:
                    raise SandboxValidationError(
                        'IS_COMPONENT implies FORCE_SHARED_LIB. '
                        'Please remove the latter.', context)
                if is_framework:
                    raise SandboxValidationError(
                        'IS_COMPONENT conflicts with IS_FRAMEWORK. '
                        'Please remove one.', context)
                if static_lib:
                    raise SandboxValidationError(
                        'IS_COMPONENT conflicts with FORCE_STATIC_LIB. '
                        'Please remove one.', context)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.COMPONENT

            if is_framework:
                if shared_lib:
                    raise SandboxValidationError(
                        'IS_FRAMEWORK implies FORCE_SHARED_LIB. '
                        'Please remove the latter.', context)
                if soname:
                    raise SandboxValidationError(
                        'IS_FRAMEWORK conflicts with SONAME. '
                        'Please remove one.', context)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.FRAMEWORK

            if static_name:
                if not static_lib:
                    raise SandboxValidationError(
                        'STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB',
                        context)
                static_args['real_name'] = static_name

            if shared_name:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB',
                        context)
                shared_args['real_name'] = shared_name

            if soname:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SONAME requires FORCE_SHARED_LIB', context)
                shared_args['soname'] = soname

            if not static_lib and not shared_lib:
                static_lib = True

            # If both a shared and a static library are created, only the
            # shared library is meant to be an SDK library.
            if context.get('SDK_LIBRARY'):
                if shared_lib:
                    shared_args['is_sdk'] = True
                elif static_lib:
                    static_args['is_sdk'] = True

            if shared_lib and static_lib:
                if not static_name and not shared_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but neither STATIC_LIBRARY_NAME or '
                        'SHARED_LIBRARY_NAME is set. At least one is required.',
                        context)
                if static_name and not shared_name and static_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, '
                        'and SHARED_LIBRARY_NAME is unset. Please either '
                        'change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set '
                        'SHARED_LIBRARY_NAME.', context)
                if shared_name and not static_name and shared_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, '
                        'and STATIC_LIBRARY_NAME is unset. Please either '
                        'change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set '
                        'STATIC_LIBRARY_NAME.', context)
                if shared_name and static_name and shared_name == static_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but SHARED_LIBRARY_NAME is the same as '
                        'STATIC_LIBRARY_NAME. Please change one of them.',
                        context)

            if shared_lib:
                lib = SharedLibrary(context, libname, **shared_args)
                self._libs[libname].append(lib)
                self._linkage.append((context, lib, 'USE_LIBS'))
            if static_lib:
                lib = StaticLibrary(context, libname, **static_args)
                self._libs[libname].append(lib)
                self._linkage.append((context, lib, 'USE_LIBS'))

        # While there are multiple test manifests, the behavior is very similar
        # across them. We enforce this by having common handling of all
        # manifests and outputting a single class type with the differences
        # described inside the instance.
        #
        # Keys are variable prefixes and values are tuples describing how these
        # manifests should be handled:
        #
        #    (flavor, install_root, install_subdir, active)
        #
        # flavor identifies the flavor of this test.
        # install_root is the path prefix of where to install the files in
        #     the tests directory.
        # install_subdir is the path below install_root where the files for
        #     this manifest are installed.
        # active indicates whether to filter out inactive tests from the
        #     manifest.
        #
        # We ideally don't filter out inactive tests. However, not every test
        # harness can yet deal with test filtering. Once all harnesses can do
        # this, this feature can be dropped.
        test_manifests = dict(
            A11Y=('a11y', 'testing/mochitest', 'a11y', True),
            BROWSER_CHROME=('browser-chrome', 'testing/mochitest', 'browser', True),
            METRO_CHROME=('metro-chrome', 'testing/mochitest', 'metro', True),
            MOCHITEST=('mochitest', 'testing/mochitest', 'tests', True),
            MOCHITEST_CHROME=('chrome', 'testing/mochitest', 'chrome', True),
            MOCHITEST_WEBAPPRT_CHROME=('webapprt-chrome', 'testing/mochitest', 'webapprtChrome', True),
            WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', '.', True),
            XPCSHELL_TESTS=('xpcshell', 'xpcshell', '.', False),
        )

        for prefix, info in test_manifests.items():
            for path in context.get('%s_MANIFESTS' % prefix, []):
                for obj in self._process_test_manifest(context, info, path):
                    yield obj

        for flavor in ('crashtest', 'reftest'):
            for path in context.get('%s_MANIFESTS' % flavor.upper(), []):
                for obj in self._process_reftest_manifest(context, flavor, path):
                    yield obj

        jar_manifests = context.get('JAR_MANIFESTS', [])
        if len(jar_manifests) > 1:
            raise SandboxValidationError('While JAR_MANIFESTS is a list, '
                'it is currently limited to one value.', context)

        for path in jar_manifests:
            yield JARManifest(context, mozpath.join(context.srcdir, path))

        # Temporary test to look for jar.mn files that creep in without using
        # the new declaration. Before, we didn't require jar.mn files to be
        # declared anywhere (they were discovered). This will detect people
        # relying on the old behavior.
        if os.path.exists(os.path.join(context.srcdir, 'jar.mn')):
            if 'jar.mn' not in jar_manifests:
                raise SandboxValidationError('A jar.mn exists but it '
                    'is not referenced in the moz.build file. '
                    'Please define JAR_MANIFESTS.', context)

        for name, jar in context.get('JAVA_JAR_TARGETS', {}).items():
            yield ContextWrapped(context, jar)

        for name, data in context.get('ANDROID_ECLIPSE_PROJECT_TARGETS', {}).items():
            yield ContextWrapped(context, data)

        if passthru.variables:
            yield passthru
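The varmap handling in the middle of emit_from_context routes each source file to a make variable keyed on its extension, and additionally marks generated sources for cleanup. A minimal standalone sketch of that routing, using os.path.splitext in place of mozpath.splitext:

import os
from collections import defaultdict

SOURCES_MAP = {
    '.s': 'ASFILES', '.asm': 'ASFILES',
    '.c': 'CSRCS', '.m': 'CMSRCS', '.mm': 'CMMSRCS',
    '.cc': 'CPPSRCS', '.cpp': 'CPPSRCS', '.cxx': 'CPPSRCS',
    '.S': 'SSRCS',
}

def route_sources(files, generated=False):
    variables = defaultdict(list)
    for f in files:
        variables[SOURCES_MAP[os.path.splitext(f)[1]]].append(f)
        if generated:
            # Generated sources are also scheduled for deletion on clean.
            variables['GARBAGE'].append(f)
    return dict(variables)

print(route_sources(['foo.cpp', 'bar.cc', 'baz.c']))
# {'CPPSRCS': ['foo.cpp', 'bar.cc'], 'CSRCS': ['baz.c']}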
Example #29
File: data.py Project: isabella232/juggler
    def all_regular_bindinggen_stems(self):
        for stem in self.all_regular_stems():
            yield '%sBinding' % stem

        for source in self.generated_events_sources:
            yield mozpath.splitext(mozpath.basename(source))[0]
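A usage sketch of the generator above, with hypothetical stems and generated-event source paths, and os.path standing in for mozpath:

import os

regular_stems = ['Event', 'Node']
generated_events_sources = ['GeneratedEvents/FooEvent.cpp']

def all_regular_bindinggen_stems():
    for stem in regular_stems:
        yield '%sBinding' % stem
    # Generated event sources contribute their bare file stem.
    for source in generated_events_sources:
        yield os.path.splitext(os.path.basename(source))[0]

print(list(all_regular_bindinggen_stems()))
# ['EventBinding', 'NodeBinding', 'FooEvent']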
Example #30
def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
    """Read a gyp configuration and emits GypContexts for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp configuration
    file, output is the base path under which the objdir for the various gyp
    dependencies will be, and vars is a dict of variables to pass to the gyp
    processor.
    """

    time_start = time.time()

    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())

    params = {b"parallel": False, b"generator_flags": {}, b"build_files": [path]}

    # Files that gyp_chromium always includes
    includes = [encode(mozpath.join(script_dir, "common.gypi"))]
    finder = FileFinder(chrome_src, find_executables=False)
    includes.extend(encode(mozpath.join(chrome_src, name)) for name, _ in finder.find("*/supplement.gypi"))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = gyp.Load(
        [path],
        format=b"mozbuild",
        default_variables=str_vars,
        includes=includes,
        depth=encode(mozpath.dirname(path)),
        params=params,
    )

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets, path.replace(b"/", os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file), mozpath.dirname(path))
        subdir = "%s_%s" % (mozpath.splitext(mozpath.basename(build_file))[0], target_name)
        # Emit a context for each target.
        context = GypContext(config, mozpath.relpath(mozpath.join(output, reldir, subdir), config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp is relative to build_file
        for f in data[build_file]["included_files"]:
            context.add_source(mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = "Debug" if config.substs["MOZ_DEBUG"] else "Release"
        if c not in spec["configurations"]:
            raise RuntimeError("Missing %s gyp configuration for target %s " "in %s" % (c, target_name, build_file))
        target_conf = spec["configurations"][c]

        if spec["type"] == "none":
            continue
        elif spec["type"] == "static_library":
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec["target_name"]
            if name.startswith("lib"):
                name = name[3:]
            # The context expects a unicode string.
            context["LIBRARY_NAME"] = name.decode("utf-8")
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            for f in spec.get("sources", []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                s = SourcePath(context, f)
                if ext == ".h":
                    continue
                if ext != ".S" and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)

            # The context expects alphabetical order when adding sources
            context["SOURCES"] = alphabetical_sorted(sources)
            context["UNIFIED_SOURCES"] = alphabetical_sorted(unified_sources)

            for define in target_conf.get("defines", []):
                if "=" in define:
                    name, value = define.split("=", 1)
                    context["DEFINES"][name] = value
                else:
                    context["DEFINES"][define] = True

            for include in target_conf.get("include_dirs", []):
                # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                #
                # NB: gyp files sometimes have actual absolute paths (e.g.
                # /usr/include32) and sometimes paths that moz.build considers
                # absolute, i.e. starting from topsrcdir. There's no good way
                # to tell them apart here, and the actual absolute paths are
                # likely bogus. In any event, actual absolute paths will be
                # filtered out by trying to find them in topsrcdir.
                if include.startswith("/"):
                    resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
                else:
                    resolved = mozpath.abspath(mozpath.join(mozpath.dirname(build_file), include))
                if not os.path.exists(resolved):
                    continue
                context["LOCAL_INCLUDES"] += [include]

            context["ASFLAGS"] = target_conf.get("asflags_mozilla", [])
            flags = target_conf.get("cflags_mozilla", [])
            if flags:
                suffix_map = {".c": "CFLAGS", ".cpp": "CXXFLAGS", ".cc": "CXXFLAGS", ".m": "CMFLAGS", ".mm": "CMMFLAGS"}
                variables = (suffix_map[e] for e in extensions if e in suffix_map)
                for var in variables:
                    context[var].extend(flags)
        else:
            # Ignore types other than static_library because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError("Unsupported gyp target type: %s" % spec["type"])

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context["LOCAL_INCLUDES"] += ["/ipc/chromium/src", "/ipc/glue"]
        context["GENERATED_INCLUDES"] += ["/ipc/ipdl/_ipdlheaders"]
        # These get set via VC project file settings for normal GYP builds.
        if config.substs["OS_TARGET"] == "WINNT":
            context["DEFINES"]["UNICODE"] = True
            context["DEFINES"]["_UNICODE"] = True
        context["DISABLE_STL_WRAPPING"] = True

        context.execution_time = time.time() - time_start
        yield context
        time_start = time.time()
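Both read_from_gyp variants parse gyp 'defines' entries the same way: a 'NAME=value' entry is split once on the first '=', and a bare name becomes a boolean define. A minimal sketch of that parsing, with hypothetical inputs:

def parse_gyp_defines(defines):
    result = {}
    for define in defines:
        if '=' in define:
            # Split only once so values may themselves contain '='.
            name, value = define.split('=', 1)
            result[name] = value
        else:
            result[define] = True
    return result

print(parse_gyp_defines(['DEBUG', 'VERSION=1.2', 'EXTRA=a=b']))
# {'DEBUG': True, 'VERSION': '1.2', 'EXTRA': 'a=b'}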
Example #31
    def emit_from_sandbox(self, sandbox):
        """Convert a MozbuildSandbox to tree metadata objects.

        This is a generator of mozbuild.frontend.data.SandboxDerived instances.
        """
        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_sandbox(sandbox):
            yield o

        for path in sandbox["CONFIGURE_SUBST_FILES"]:
            yield self._create_substitution(ConfigFileSubstitution, sandbox, path)

        for path in sandbox["CONFIGURE_DEFINE_FILES"]:
            yield self._create_substitution(HeaderFileSubstitution, sandbox, path)

        # XPIDL source files get processed and turned into .h and .xpt files.
        # If there are multiple XPIDL files in a directory, they get linked
        # together into a final .xpt, which has the name defined by
        # XPIDL_MODULE.
        xpidl_module = sandbox["XPIDL_MODULE"]

        if sandbox["XPIDL_SOURCES"] and not xpidl_module:
            raise SandboxValidationError("XPIDL_MODULE must be defined if " "XPIDL_SOURCES is defined.")

        if sandbox["XPIDL_SOURCES"] and sandbox["NO_DIST_INSTALL"]:
            self.log(
                logging.WARN,
                "mozbuild_warning",
                dict(path=sandbox.main_path),
                "{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.",
            )

        for idl in sandbox["XPIDL_SOURCES"]:
            yield XPIDLFile(sandbox, mozpath.join(sandbox["SRCDIR"], idl), xpidl_module)

        for symbol in ("SOURCES", "HOST_SOURCES", "UNIFIED_SOURCES"):
            for src in sandbox[symbol] or []:
                if not os.path.exists(mozpath.join(sandbox["SRCDIR"], src)):
                    raise SandboxValidationError(
                        "Reference to a file that "
                        "doesn't exist in %s (%s) in %s" % (symbol, src, sandbox["RELATIVEDIR"])
                    )

        if sandbox.get("LIBXUL_LIBRARY") and sandbox.get("FORCE_STATIC_LIB"):
            raise SandboxValidationError("LIBXUL_LIBRARY implies FORCE_STATIC_LIB")

        # Proxy some variables as-is until we have richer classes to represent
        # them. We should aim to keep this set small because it violates the
        # desired abstraction of the build definition away from makefiles.
        passthru = VariablePassthru(sandbox)
        varmap = dict(
            # Makefile.in : moz.build
            ANDROID_GENERATED_RESFILES="ANDROID_GENERATED_RESFILES",
            ANDROID_RESFILES="ANDROID_RESFILES",
            CPP_UNIT_TESTS="CPP_UNIT_TESTS",
            EXPORT_LIBRARY="EXPORT_LIBRARY",
            EXTRA_COMPONENTS="EXTRA_COMPONENTS",
            EXTRA_JS_MODULES="EXTRA_JS_MODULES",
            EXTRA_PP_COMPONENTS="EXTRA_PP_COMPONENTS",
            EXTRA_PP_JS_MODULES="EXTRA_PP_JS_MODULES",
            FAIL_ON_WARNINGS="FAIL_ON_WARNINGS",
            FILES_PER_UNIFIED_FILE="FILES_PER_UNIFIED_FILE",
            FORCE_SHARED_LIB="FORCE_SHARED_LIB",
            FORCE_STATIC_LIB="FORCE_STATIC_LIB",
            GENERATED_FILES="GENERATED_FILES",
            HOST_LIBRARY_NAME="HOST_LIBRARY_NAME",
            IS_COMPONENT="IS_COMPONENT",
            JS_MODULES_PATH="JS_MODULES_PATH",
            LIBS="LIBS",
            LIBXUL_LIBRARY="LIBXUL_LIBRARY",
            MSVC_ENABLE_PGO="MSVC_ENABLE_PGO",
            NO_DIST_INSTALL="NO_DIST_INSTALL",
            OS_LIBS="OS_LIBS",
            SDK_LIBRARY="SDK_LIBRARY",
        )
        for mak, moz in varmap.items():
            if sandbox[moz]:
                passthru.variables[mak] = sandbox[moz]

        # NO_VISIBILITY_FLAGS is slightly different
        if sandbox["NO_VISIBILITY_FLAGS"]:
            passthru.variables["VISIBILITY_FLAGS"] = ""

        varmap = dict(
            SOURCES={
                ".s": "ASFILES",
                ".asm": "ASFILES",
                ".c": "CSRCS",
                ".m": "CMSRCS",
                ".mm": "CMMSRCS",
                ".cc": "CPPSRCS",
                ".cpp": "CPPSRCS",
                ".S": "SSRCS",
            },
            HOST_SOURCES={".c": "HOST_CSRCS", ".mm": "HOST_CMMSRCS", ".cc": "HOST_CPPSRCS", ".cpp": "HOST_CPPSRCS"},
            UNIFIED_SOURCES={
                ".c": "UNIFIED_CSRCS",
                ".mm": "UNIFIED_CMMSRCS",
                ".cc": "UNIFIED_CPPSRCS",
                ".cpp": "UNIFIED_CPPSRCS",
            },
        )
        varmap.update(dict(("GENERATED_%s" % k, v) for k, v in varmap.items() if k in ("SOURCES", "UNIFIED_SOURCES")))
        for variable, mapping in varmap.items():
            for f in sandbox[variable]:
                ext = mozpath.splitext(f)[1]
                if ext not in mapping:
                    raise SandboxValidationError("%s has an unknown file type in %s" % (f, sandbox["RELATIVEDIR"]))
                l = passthru.variables.setdefault(mapping[ext], [])
                l.append(f)
                if variable.startswith("GENERATED_"):
                    l = passthru.variables.setdefault("GARBAGE", [])
                    l.append(f)

        no_pgo = sandbox.get("NO_PGO")
        sources = sandbox.get("SOURCES", [])
        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
        if no_pgo:
            if no_pgo_sources:
                raise SandboxValidationError("NO_PGO and SOURCES[...].no_pgo cannot be set at the same time")
            passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo
        if no_pgo_sources:
            passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo_sources

        exports = sandbox.get("EXPORTS")
        if exports:
            yield Exports(sandbox, exports, dist_install=not sandbox.get("NO_DIST_INSTALL", False))

        defines = sandbox.get("DEFINES")
        if defines:
            yield Defines(sandbox, defines)

        program = sandbox.get("PROGRAM")
        if program:
            yield Program(sandbox, program, sandbox["CONFIG"]["BIN_SUFFIX"])

        program = sandbox.get("HOST_PROGRAM")
        if program:
            yield HostProgram(sandbox, program, sandbox["CONFIG"]["HOST_BIN_SUFFIX"])

        for program in sandbox["SIMPLE_PROGRAMS"]:
            yield SimpleProgram(sandbox, program, sandbox["CONFIG"]["BIN_SUFFIX"])

        for program in sandbox["HOST_SIMPLE_PROGRAMS"]:
            yield HostSimpleProgram(sandbox, program, sandbox["CONFIG"]["HOST_BIN_SUFFIX"])

        simple_lists = [
            ("GENERATED_EVENTS_WEBIDL_FILES", GeneratedEventWebIDLFile),
            ("GENERATED_WEBIDL_FILES", GeneratedWebIDLFile),
            ("IPDL_SOURCES", IPDLFile),
            ("LOCAL_INCLUDES", LocalInclude),
            ("GENERATED_INCLUDES", GeneratedInclude),
            ("PREPROCESSED_TEST_WEBIDL_FILES", PreprocessedTestWebIDLFile),
            ("PREPROCESSED_WEBIDL_FILES", PreprocessedWebIDLFile),
            ("TEST_WEBIDL_FILES", TestWebIDLFile),
            ("WEBIDL_FILES", WebIDLFile),
        ]
        for sandbox_var, klass in simple_lists:
            for name in sandbox.get(sandbox_var, []):
                yield klass(sandbox, name)

        if sandbox.get("FINAL_TARGET") or sandbox.get("XPI_NAME") or sandbox.get("DIST_SUBDIR"):
            yield InstallationTarget(sandbox)

        libname = sandbox.get("LIBRARY_NAME")
        final_lib = sandbox.get("FINAL_LIBRARY")
        if not libname and final_lib:
            # If no LIBRARY_NAME is given, create one.
            libname = sandbox["RELATIVEDIR"].replace("/", "_")
        if libname:
            self._libs.setdefault(libname, {})[sandbox["OBJDIR"]] = LibraryDefinition(sandbox, libname)

        if final_lib:
            if sandbox.get("FORCE_STATIC_LIB"):
                raise SandboxValidationError("FINAL_LIBRARY implies FORCE_STATIC_LIB")
            self._final_libs.append((sandbox["OBJDIR"], libname, final_lib))
            passthru.variables["FORCE_STATIC_LIB"] = True

        # While there are multiple test manifests, the behavior is very similar
        # across them. We enforce this by having common handling of all
        # manifests and outputting a single class type with the differences
        # described inside the instance.
        #
        # Keys are variable prefixes and values are tuples describing how these
        # manifests should be handled:
        #
        #    (flavor, install_prefix, active)
        #
        # flavor identifies the flavor of this test.
        # install_prefix is the path prefix of where to install the files in
        #     the tests directory.
        # active indicates whether to filter out inactive tests from the
        #     manifest.
        #
        # We ideally don't filter out inactive tests. However, not every test
        # harness can yet deal with test filtering. Once all harnesses can do
        # this, this feature can be dropped.
        test_manifests = dict(
            A11Y=("a11y", "testing/mochitest/a11y", True),
            BROWSER_CHROME=("browser-chrome", "testing/mochitest/browser", True),
            METRO_CHROME=("metro-chrome", "testing/mochitest/metro", True),
            MOCHITEST=("mochitest", "testing/mochitest/tests", True),
            MOCHITEST_CHROME=("chrome", "testing/mochitest/chrome", True),
            MOCHITEST_WEBAPPRT_CHROME=("webapprt-chrome", "testing/mochitest/webapprtChrome", True),
            WEBRTC_SIGNALLING_TEST=("steeplechase", "steeplechase", True),
            XPCSHELL_TESTS=("xpcshell", "xpcshell", False),
        )

        for prefix, info in test_manifests.items():
            for path in sandbox.get("%s_MANIFESTS" % prefix, []):
                for obj in self._process_test_manifest(sandbox, info, path):
                    yield obj

        for name, jar in sandbox.get("JAVA_JAR_TARGETS", {}).items():
            yield SandboxWrapped(sandbox, jar)

        if passthru.variables:
            yield passthru
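To make the varmap loop above concrete, here is a standalone sketch that buckets files by extension the same way, with a trimmed-down mapping and invented file names; note how GENERATED_ entries are additionally recorded in GARBAGE for cleaning, exactly as in the loop above.

import os

varmap = {
    "SOURCES": {".c": "CSRCS", ".cpp": "CPPSRCS"},
    "GENERATED_SOURCES": {".c": "CSRCS", ".cpp": "CPPSRCS"},
}
sandbox = {"SOURCES": ["a.c", "b.cpp"], "GENERATED_SOURCES": ["gen.c"]}

variables = {}
for variable, mapping in varmap.items():
    for f in sandbox[variable]:
        ext = os.path.splitext(f)[1]
        variables.setdefault(mapping[ext], []).append(f)
        if variable.startswith("GENERATED_"):
            # Generated sources are also scheduled for cleaning.
            variables.setdefault("GARBAGE", []).append(f)

print(variables)
# -> {'CSRCS': ['a.c', 'gen.c'], 'CPPSRCS': ['b.cpp'], 'GARBAGE': ['gen.c']}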
Example #32
    def emit_from_sandbox(self, sandbox):
        """Convert a MozbuildSandbox to tree metadata objects.

        This is a generator of mozbuild.frontend.data.SandboxDerived instances.
        """
        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_sandbox(sandbox):
            yield o

        for path in sandbox["CONFIGURE_SUBST_FILES"]:
            yield self._create_substitution(ConfigFileSubstitution, sandbox, path)

        for path in sandbox["CONFIGURE_DEFINE_FILES"]:
            yield self._create_substitution(HeaderFileSubstitution, sandbox, path)

        # XPIDL source files get processed and turned into .h and .xpt files.
        # If there are multiple XPIDL files in a directory, they get linked
        # together into a final .xpt, which has the name defined by
        # XPIDL_MODULE.
        xpidl_module = sandbox["XPIDL_MODULE"]

        if sandbox["XPIDL_SOURCES"] and not xpidl_module:
            raise SandboxValidationError("XPIDL_MODULE must be defined if " "XPIDL_SOURCES is defined.")

        if xpidl_module and not sandbox["XPIDL_SOURCES"]:
            raise SandboxValidationError(
                "XPIDL_MODULE cannot be defined " "unless there are XPIDL_SOURCES: %s" % sandbox["RELATIVEDIR"]
            )

        if sandbox["XPIDL_SOURCES"] and sandbox["NO_DIST_INSTALL"]:
            self.log(
                logging.WARN,
                "mozbuild_warning",
                dict(path=sandbox.main_path),
                "{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.",
            )

        for idl in sandbox["XPIDL_SOURCES"]:
            yield XPIDLFile(sandbox, mozpath.join(sandbox["SRCDIR"], idl), xpidl_module)

        for symbol in ("SOURCES", "HOST_SOURCES", "UNIFIED_SOURCES"):
            for src in sandbox[symbol] or []:
                if not os.path.exists(mozpath.join(sandbox["SRCDIR"], src)):
                    raise SandboxValidationError(
                        "Reference to a file that "
                        "doesn't exist in %s (%s) in %s" % (symbol, src, sandbox["RELATIVEDIR"])
                    )

        # Proxy some variables as-is until we have richer classes to represent
        # them. We should aim to keep this set small because it violates the
        # desired abstraction of the build definition away from makefiles.
        passthru = VariablePassthru(sandbox)
        varlist = [
            "ANDROID_GENERATED_RESFILES",
            "ANDROID_RES_DIRS",
            "CPP_UNIT_TESTS",
            "DISABLE_STL_WRAPPING",
            "EXPORT_LIBRARY",
            "EXTRA_ASSEMBLER_FLAGS",
            "EXTRA_COMPILE_FLAGS",
            "EXTRA_COMPONENTS",
            "EXTRA_DSO_LDOPTS",
            "EXTRA_JS_MODULES",
            "EXTRA_PP_COMPONENTS",
            "EXTRA_PP_JS_MODULES",
            "FAIL_ON_WARNINGS",
            "FILES_PER_UNIFIED_FILE",
            "FORCE_SHARED_LIB",
            "FORCE_STATIC_LIB",
            "USE_STATIC_LIBS",
            "GENERATED_FILES",
            "HOST_LIBRARY_NAME",
            "IS_COMPONENT",
            "IS_GYP_DIR",
            "JS_MODULES_PATH",
            "LIBS",
            "MSVC_ENABLE_PGO",
            "NO_DIST_INSTALL",
            "OS_LIBS",
            "RCFILE",
            "RESFILE",
            "RCINCLUDE",
            "DEFFILE",
            "SDK_LIBRARY",
            "WIN32_EXE_LDFLAGS",
            "LD_VERSION_SCRIPT",
        ]
        for v in varlist:
            if v in sandbox and sandbox[v]:
                passthru.variables[v] = sandbox[v]

        for v in ["CFLAGS", "CXXFLAGS", "CMFLAGS", "CMMFLAGS", "LDFLAGS"]:
            if v in sandbox and sandbox[v]:
                passthru.variables["MOZBUILD_" + v] = sandbox[v]

        # NO_VISIBILITY_FLAGS is slightly different
        if sandbox["NO_VISIBILITY_FLAGS"]:
            passthru.variables["VISIBILITY_FLAGS"] = ""

        if sandbox["DELAYLOAD_DLLS"]:
            passthru.variables["DELAYLOAD_LDFLAGS"] = [("-DELAYLOAD:%s" % dll) for dll in sandbox["DELAYLOAD_DLLS"]]
            passthru.variables["USE_DELAYIMP"] = True

        varmap = dict(
            SOURCES={
                ".s": "ASFILES",
                ".asm": "ASFILES",
                ".c": "CSRCS",
                ".m": "CMSRCS",
                ".mm": "CMMSRCS",
                ".cc": "CPPSRCS",
                ".cpp": "CPPSRCS",
                ".cxx": "CPPSRCS",
                ".S": "SSRCS",
            },
            HOST_SOURCES={
                ".c": "HOST_CSRCS",
                ".mm": "HOST_CMMSRCS",
                ".cc": "HOST_CPPSRCS",
                ".cpp": "HOST_CPPSRCS",
                ".cxx": "HOST_CPPSRCS",
            },
            UNIFIED_SOURCES={
                ".c": "UNIFIED_CSRCS",
                ".mm": "UNIFIED_CMMSRCS",
                ".cc": "UNIFIED_CPPSRCS",
                ".cpp": "UNIFIED_CPPSRCS",
                ".cxx": "UNIFIED_CPPSRCS",
            },
        )
        varmap.update(dict(("GENERATED_%s" % k, v) for k, v in varmap.items() if k in ("SOURCES", "UNIFIED_SOURCES")))
        for variable, mapping in varmap.items():
            for f in sandbox[variable]:
                ext = mozpath.splitext(f)[1]
                if ext not in mapping:
                    raise SandboxValidationError("%s has an unknown file type in %s" % (f, sandbox["RELATIVEDIR"]))
                l = passthru.variables.setdefault(mapping[ext], [])
                l.append(f)
                if variable.startswith("GENERATED_"):
                    l = passthru.variables.setdefault("GARBAGE", [])
                    l.append(f)

        no_pgo = sandbox.get("NO_PGO")
        sources = sandbox.get("SOURCES", [])
        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
        if no_pgo:
            if no_pgo_sources:
                raise SandboxValidationError("NO_PGO and SOURCES[...].no_pgo cannot be set at the same time")
            passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo
        if no_pgo_sources:
            passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo_sources

        sources_with_flags = [f for f in sources if sources[f].flags]
        for f in sources_with_flags:
            yield PerSourceFlag(sandbox, f, sources[f].flags)

        exports = sandbox.get("EXPORTS")
        if exports:
            yield Exports(sandbox, exports, dist_install=not sandbox.get("NO_DIST_INSTALL", False))

        defines = sandbox.get("DEFINES")
        if defines:
            yield Defines(sandbox, defines)

        resources = sandbox.get("RESOURCE_FILES")
        if resources:
            yield Resources(sandbox, resources, defines)

        program = sandbox.get("PROGRAM")
        if program:
            yield Program(sandbox, program, sandbox["CONFIG"]["BIN_SUFFIX"])

        program = sandbox.get("HOST_PROGRAM")
        if program:
            yield HostProgram(sandbox, program, sandbox["CONFIG"]["HOST_BIN_SUFFIX"])

        for program in sandbox["SIMPLE_PROGRAMS"]:
            yield SimpleProgram(sandbox, program, sandbox["CONFIG"]["BIN_SUFFIX"])

        for program in sandbox["HOST_SIMPLE_PROGRAMS"]:
            yield HostSimpleProgram(sandbox, program, sandbox["CONFIG"]["HOST_BIN_SUFFIX"])

        simple_lists = [
            ("GENERATED_EVENTS_WEBIDL_FILES", GeneratedEventWebIDLFile),
            ("GENERATED_WEBIDL_FILES", GeneratedWebIDLFile),
            ("IPDL_SOURCES", IPDLFile),
            ("LOCAL_INCLUDES", LocalInclude),
            ("GENERATED_INCLUDES", GeneratedInclude),
            ("PREPROCESSED_TEST_WEBIDL_FILES", PreprocessedTestWebIDLFile),
            ("PREPROCESSED_WEBIDL_FILES", PreprocessedWebIDLFile),
            ("TEST_WEBIDL_FILES", TestWebIDLFile),
            ("WEBIDL_FILES", WebIDLFile),
            ("WEBIDL_EXAMPLE_INTERFACES", ExampleWebIDLInterface),
        ]
        for sandbox_var, klass in simple_lists:
            for name in sandbox.get(sandbox_var, []):
                yield klass(sandbox, name)

        if sandbox.get("FINAL_TARGET") or sandbox.get("XPI_NAME") or sandbox.get("DIST_SUBDIR"):
            yield InstallationTarget(sandbox)

        libname = sandbox.get("LIBRARY_NAME")
        final_lib = sandbox.get("FINAL_LIBRARY")
        if not libname and final_lib:
            # If no LIBRARY_NAME is given, create one.
            libname = sandbox["RELATIVEDIR"].replace("/", "_")
        if libname:
            self._libs.setdefault(libname, {})[sandbox["OBJDIR"]] = LibraryDefinition(sandbox, libname)

        if final_lib:
            if isinstance(sandbox, MozbuildSandbox) and sandbox.get("FORCE_STATIC_LIB"):
                raise SandboxValidationError("FINAL_LIBRARY implies FORCE_STATIC_LIB")
            self._final_libs.append((sandbox["OBJDIR"], libname, final_lib))
            passthru.variables["FORCE_STATIC_LIB"] = True

        # While there are multiple test manifests, the behavior is very similar
        # across them. We enforce this by having common handling of all
        # manifests and outputting a single class type with the differences
        # described inside the instance.
        #
        # Keys are variable prefixes and values are tuples describing how these
        # manifests should be handled:
        #
        #    (flavor, install_root, install_subdir, active)
        #
        # flavor identifies the flavor of this test.
        # install_root is the path prefix under the tests directory where
        #     files for this kind of manifest are installed.
        # install_subdir is the subdirectory under install_root where the
        #     files for each manifest go.
        # active indicates whether to filter out inactive tests from the
        #     manifest.
        #
        # We ideally don't filter out inactive tests. However, not every test
        # harness can yet deal with test filtering. Once all harnesses can do
        # this, this feature can be dropped.
        test_manifests = dict(
            A11Y=("a11y", "testing/mochitest", "a11y", True),
            BROWSER_CHROME=("browser-chrome", "testing/mochitest", "browser", True),
            METRO_CHROME=("metro-chrome", "testing/mochitest", "metro", True),
            MOCHITEST=("mochitest", "testing/mochitest", "tests", True),
            MOCHITEST_CHROME=("chrome", "testing/mochitest", "chrome", True),
            MOCHITEST_WEBAPPRT_CHROME=("webapprt-chrome", "testing/mochitest", "webapprtChrome", True),
            WEBRTC_SIGNALLING_TEST=("steeplechase", "steeplechase", ".", True),
            XPCSHELL_TESTS=("xpcshell", "xpcshell", ".", False),
        )

        for prefix, info in test_manifests.items():
            for path in sandbox.get("%s_MANIFESTS" % prefix, []):
                for obj in self._process_test_manifest(sandbox, info, path):
                    yield obj

        for flavor in ("crashtest", "reftest"):
            for path in sandbox.get("%s_MANIFESTS" % flavor.upper(), []):
                for obj in self._process_reftest_manifest(sandbox, flavor, path):
                    yield obj

        jar_manifests = sandbox.get("JAR_MANIFESTS", [])
        if len(jar_manifests) > 1:
            raise SandboxValidationError("While JAR_MANIFESTS is a list, " "it is currently limited to one value.")

        for path in jar_manifests:
            yield JARManifest(sandbox, mozpath.join(sandbox["SRCDIR"], path))

        # Temporary test to look for jar.mn files that creep in without using
        # the new declaration. Before, we didn't require jar.mn files to
        # be declared anywhere (they were discovered). This will detect people
        # relying on the old behavior.
        if os.path.exists(os.path.join(sandbox["SRCDIR"], "jar.mn")):
            if "jar.mn" not in jar_manifests:
                raise SandboxValidationError(
                    "A jar.mn exists in %s but it "
                    "is not referenced in the corresponding moz.build file. "
                    "Please define JAR_MANIFESTS in the moz.build file." % sandbox["SRCDIR"]
                )

        for name, jar in sandbox.get("JAVA_JAR_TARGETS", {}).items():
            yield SandboxWrapped(sandbox, jar)

        for name, data in sandbox.get("ANDROID_ECLIPSE_PROJECT_TARGETS", {}).items():
            yield SandboxWrapped(sandbox, data)

        if passthru.variables:
            yield passthru
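The DELAYLOAD_DLLS handling in this version is a plain string expansion into linker flags; a standalone sketch of the same comprehension, with invented DLL names.

DELAYLOAD_DLLS = ['user32.dll', 'winmm.dll']  # invented example values
DELAYLOAD_LDFLAGS = ['-DELAYLOAD:%s' % dll for dll in DELAYLOAD_DLLS]
print(DELAYLOAD_LDFLAGS)
# -> ['-DELAYLOAD:user32.dll', '-DELAYLOAD:winmm.dll']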
Example #33
 def canonical_suffix_for_file(f):
     return canonicalized_suffix_map[mozpath.splitext(f)[1]]
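This one-liner assumes a canonicalized_suffix_map built by inverting a canonical-suffix table, as the fuller emitter excerpt later in this document does. A self-contained sketch of that inversion, with a trimmed mapping and os.path standing in for mozpath:

import os

# Canonical suffix -> alternative spellings of the same language.
suffix_map = {
    '.c': set(),
    '.cpp': {'.cc', '.cxx'},
    '.s': {'.asm'},
}

# Invert the table so every spelling, canonical included, maps back to
# its canonical suffix.
canonicalized_suffix_map = {}
for suffix, alternatives in suffix_map.items():
    for a in alternatives | {suffix}:
        canonicalized_suffix_map[a] = suffix

def canonical_suffix_for_file(f):
    return canonicalized_suffix_map[os.path.splitext(f)[1]]

print(canonical_suffix_for_file('widget.cc'))  # -> .cpp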
Example #34
def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
    """Read a gyp configuration and emits GypSandboxes for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp configuration
    file, output is the base path under which the objdir for the various gyp
    dependencies will be, and vars is a dict of variables to pass to the gyp
    processor.
    """

    time_start = time.time()
    all_sources = set()

    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())

    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
    }

    # Files that gyp_chromium always includes
    includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
    finder = FileFinder(chrome_src, find_executables=False)
    includes.extend(
        encode(mozpath.join(chrome_src, name))
        for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
            default_variables=str_vars,
            includes=includes,
            depth=encode(mozpath.dirname(path)),
            params=params)

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)
        # The list of included files returned by gyp are relative to build_file
        included_files = [
            mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f))
            for f in data[build_file]['included_files']
        ]
        # Emit a sandbox for each target.
        sandbox = GypSandbox(mozpath.abspath(build_file), included_files)

        with sandbox.allow_all_writes() as d:
            topsrcdir = d['TOPSRCDIR'] = config.topsrcdir
            d['TOPOBJDIR'] = config.topobjdir
            relsrcdir = d['RELATIVEDIR'] = mozpath.relpath(
                mozpath.dirname(build_file), config.topsrcdir)
            d['SRCDIR'] = mozpath.join(topsrcdir, relsrcdir)

            # Each target is given its own objdir. The base of that objdir
            # is derived from the relative path from the root gyp file path
            # to the current build_file, placed under the given output
            # directory. Since several targets can be in a given build_file,
            # separate them in subdirectories using the build_file basename
            # and the target_name.
            reldir = mozpath.relpath(mozpath.dirname(build_file),
                                     mozpath.dirname(path))
            subdir = '%s_%s' % (
                mozpath.splitext(mozpath.basename(build_file))[0],
                target_name,
            )
            d['OBJDIR'] = mozpath.join(output, reldir, subdir)
            d['IS_GYP_DIR'] = True

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if spec['type'] == 'none':
            continue
        elif spec['type'] == 'static_library':
            sandbox['FORCE_STATIC_LIB'] = True
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if name.startswith('lib'):
                name = name[3:]
            # The sandbox expects a unicode string.
            sandbox['LIBRARY_NAME'] = name.decode('utf-8')
            # gyp files contain headers and asm sources in sources lists.
            sources = set(
                mozpath.normpath(mozpath.join(sandbox['SRCDIR'], f))
                for f in spec.get('sources', [])
                if mozpath.splitext(f)[-1] != '.h')
            asm_sources = set(f for f in sources if f.endswith('.S'))

            unified_sources = sources - non_unified_sources - asm_sources
            sources -= unified_sources
            all_sources |= sources
            # The sandbox expects alphabetical order when adding sources
            sandbox['SOURCES'] = alphabetical_sorted(sources)
            sandbox['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            for define in target_conf.get('defines', []):
                if '=' in define:
                    name, value = define.split('=', 1)
                    sandbox['DEFINES'][name] = value
                else:
                    sandbox['DEFINES'][define] = True

            for include in target_conf.get('include_dirs', []):
                sandbox['LOCAL_INCLUDES'] += [include]

            with sandbox.allow_all_writes() as d:
                d['EXTRA_ASSEMBLER_FLAGS'] = target_conf.get(
                    'asflags_mozilla', [])
                d['EXTRA_COMPILE_FLAGS'] = target_conf.get(
                    'cflags_mozilla', [])
        else:
            # Ignore types other than static_library because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        sandbox.execution_time = time.time() - time_start
        yield sandbox
        time_start = time.time()
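read_from_gyp is consumed as a generator yielding one GypSandbox per gyp target. A hedged driver sketch; the function name, gyp path, and objdir below are invented, and config stands in for a real ConfigEnvironment:

def summarize_gyp_targets(config, gyp_file, objdir_base):
    # Hypothetical driver: each yielded sandbox carries moz.build-style
    # variables (SRCDIR, OBJDIR, SOURCES, ...) for one gyp target.
    for sandbox in read_from_gyp(config, gyp_file, objdir_base, {}):
        print(sandbox['RELATIVEDIR'], '->', sandbox['OBJDIR'])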
Example #35
 def test_splitext(self):
     self.assertEqual(splitext(os.path.join("foo", "bar", "baz.qux")), ("foo/bar/baz", ".qux"))
Example #36
 def test_splitext(self):
     self.assertEqual(splitext(self.SEP.join(("foo", "bar", "baz.qux"))),
                      ("foo/bar/baz", ".qux"))
Example #37
    def emit_from_sandbox(self, sandbox):
        """Convert a MozbuildSandbox to tree metadata objects.

        This is a generator of mozbuild.frontend.data.SandboxDerived instances.
        """
        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_sandbox(sandbox):
            yield o

        for path in sandbox['CONFIGURE_SUBST_FILES']:
            yield self._create_substitution(ConfigFileSubstitution, sandbox,
                path)

        for path in sandbox['CONFIGURE_DEFINE_FILES']:
            yield self._create_substitution(HeaderFileSubstitution, sandbox,
                path)

        # XPIDL source files get processed and turned into .h and .xpt files.
        # If there are multiple XPIDL files in a directory, they get linked
        # together into a final .xpt, which has the name defined by
        # XPIDL_MODULE.
        xpidl_module = sandbox['XPIDL_MODULE']

        if sandbox['XPIDL_SOURCES'] and not xpidl_module:
            raise SandboxValidationError('XPIDL_MODULE must be defined if '
                'XPIDL_SOURCES is defined.')

        if xpidl_module and not sandbox['XPIDL_SOURCES']:
            raise SandboxValidationError('XPIDL_MODULE cannot be defined '
                'unless there are XPIDL_SOURCES: %s' % sandbox['RELATIVEDIR'])

        if sandbox['XPIDL_SOURCES'] and sandbox['NO_DIST_INSTALL']:
            self.log(logging.WARN, 'mozbuild_warning', dict(
                path=sandbox.main_path),
                '{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.')

        for idl in sandbox['XPIDL_SOURCES']:
            yield XPIDLFile(sandbox, mozpath.join(sandbox['SRCDIR'], idl),
                xpidl_module)

        for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
            for src in (sandbox[symbol] or []):
                if not os.path.exists(mozpath.join(sandbox['SRCDIR'], src)):
                    raise SandboxValidationError('Reference to a file that '
                        'doesn\'t exist in %s (%s) in %s'
                        % (symbol, src, sandbox['RELATIVEDIR']))

        if sandbox.get('LIBXUL_LIBRARY') and sandbox.get('FORCE_STATIC_LIB'):
            raise SandboxValidationError('LIBXUL_LIBRARY implies FORCE_STATIC_LIB')

        # Proxy some variables as-is until we have richer classes to represent
        # them. We should aim to keep this set small because it violates the
        # desired abstraction of the build definition away from makefiles.
        passthru = VariablePassthru(sandbox)
        varlist = [
            'ANDROID_GENERATED_RESFILES',
            'ANDROID_RES_DIRS',
            'CPP_UNIT_TESTS',
            'EXPORT_LIBRARY',
            'EXTRA_ASSEMBLER_FLAGS',
            'EXTRA_COMPILE_FLAGS',
            'EXTRA_COMPONENTS',
            'EXTRA_JS_MODULES',
            'EXTRA_PP_COMPONENTS',
            'EXTRA_PP_JS_MODULES',
            'FAIL_ON_WARNINGS',
            'FILES_PER_UNIFIED_FILE',
            'FORCE_SHARED_LIB',
            'FORCE_STATIC_LIB',
            'USE_STATIC_LIBS',
            'GENERATED_FILES',
            'HOST_LIBRARY_NAME',
            'IS_COMPONENT',
            'IS_GYP_DIR',
            'JS_MODULES_PATH',
            'LIBS',
            'LIBXUL_LIBRARY',
            'MSVC_ENABLE_PGO',
            'NO_DIST_INSTALL',
            'OS_LIBS',
            'RCFILE',
            'RESFILE',
            'DEFFILE',
            'SDK_LIBRARY',
            'CFLAGS',
            'CXXFLAGS',
            'LDFLAGS',
            'WIN32_EXE_LDFLAGS',
        ]
        for v in varlist:
            if v in sandbox and sandbox[v]:
                passthru.variables[v] = sandbox[v]

        # NO_VISIBILITY_FLAGS is slightly different
        if sandbox['NO_VISIBILITY_FLAGS']:
            passthru.variables['VISIBILITY_FLAGS'] = ''

        if sandbox['DELAYLOAD_DLLS']:
            passthru.variables['DELAYLOAD_LDFLAGS'] = [('-DELAYLOAD:%s' % dll) for dll in sandbox['DELAYLOAD_DLLS']]
            passthru.variables['USE_DELAYIMP'] = True

        varmap = dict(
            SOURCES={
                '.s': 'ASFILES',
                '.asm': 'ASFILES',
                '.c': 'CSRCS',
                '.m': 'CMSRCS',
                '.mm': 'CMMSRCS',
                '.cc': 'CPPSRCS',
                '.cpp': 'CPPSRCS',
                '.S': 'SSRCS',
            },
            HOST_SOURCES={
                '.c': 'HOST_CSRCS',
                '.mm': 'HOST_CMMSRCS',
                '.cc': 'HOST_CPPSRCS',
                '.cpp': 'HOST_CPPSRCS',
            },
            UNIFIED_SOURCES={
                '.c': 'UNIFIED_CSRCS',
                '.mm': 'UNIFIED_CMMSRCS',
                '.cc': 'UNIFIED_CPPSRCS',
                '.cpp': 'UNIFIED_CPPSRCS',
            }
        )
        varmap.update(dict(('GENERATED_%s' % k, v) for k, v in varmap.items()
                           if k in ('SOURCES', 'UNIFIED_SOURCES')))
        for variable, mapping in varmap.items():
            for f in sandbox[variable]:
                ext = mozpath.splitext(f)[1]
                if ext not in mapping:
                    raise SandboxValidationError('%s has an unknown file type in %s' % (f, sandbox['RELATIVEDIR']))
                l = passthru.variables.setdefault(mapping[ext], [])
                l.append(f)
                if variable.startswith('GENERATED_'):
                    l = passthru.variables.setdefault('GARBAGE', [])
                    l.append(f)

        no_pgo = sandbox.get('NO_PGO')
        sources = sandbox.get('SOURCES', [])
        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
        if no_pgo:
            if no_pgo_sources:
                raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo cannot be set at the same time')
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
        if no_pgo_sources:
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources

        exports = sandbox.get('EXPORTS')
        if exports:
            yield Exports(sandbox, exports,
                dist_install=not sandbox.get('NO_DIST_INSTALL', False))

        defines = sandbox.get('DEFINES')
        if defines:
            yield Defines(sandbox, defines)

        resources = sandbox.get('RESOURCE_FILES')
        if resources:
            yield Resources(sandbox, resources, defines)

        program = sandbox.get('PROGRAM')
        if program:
            yield Program(sandbox, program, sandbox['CONFIG']['BIN_SUFFIX'])

        program = sandbox.get('HOST_PROGRAM')
        if program:
            yield HostProgram(sandbox, program, sandbox['CONFIG']['HOST_BIN_SUFFIX'])

        for program in sandbox['SIMPLE_PROGRAMS']:
            yield SimpleProgram(sandbox, program, sandbox['CONFIG']['BIN_SUFFIX'])

        for program in sandbox['HOST_SIMPLE_PROGRAMS']:
            yield HostSimpleProgram(sandbox, program, sandbox['CONFIG']['HOST_BIN_SUFFIX'])

        simple_lists = [
            ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile),
            ('GENERATED_WEBIDL_FILES', GeneratedWebIDLFile),
            ('IPDL_SOURCES', IPDLFile),
            ('LOCAL_INCLUDES', LocalInclude),
            ('GENERATED_INCLUDES', GeneratedInclude),
            ('PREPROCESSED_TEST_WEBIDL_FILES', PreprocessedTestWebIDLFile),
            ('PREPROCESSED_WEBIDL_FILES', PreprocessedWebIDLFile),
            ('TEST_WEBIDL_FILES', TestWebIDLFile),
            ('WEBIDL_FILES', WebIDLFile),
            ('WEBIDL_EXAMPLE_INTERFACES', ExampleWebIDLInterface),
        ]
        for sandbox_var, klass in simple_lists:
            for name in sandbox.get(sandbox_var, []):
                yield klass(sandbox, name)

        if sandbox.get('FINAL_TARGET') or sandbox.get('XPI_NAME') or \
                sandbox.get('DIST_SUBDIR'):
            yield InstallationTarget(sandbox)

        libname = sandbox.get('LIBRARY_NAME')
        final_lib = sandbox.get('FINAL_LIBRARY')
        if not libname and final_lib:
            # If no LIBRARY_NAME is given, create one.
            libname = sandbox['RELATIVEDIR'].replace('/', '_')
        if libname:
            self._libs.setdefault(libname, {})[sandbox['OBJDIR']] = \
                LibraryDefinition(sandbox, libname)

        if final_lib:
            if isinstance(sandbox, MozbuildSandbox) and sandbox.get('FORCE_STATIC_LIB'):
                raise SandboxValidationError('FINAL_LIBRARY implies FORCE_STATIC_LIB')
            self._final_libs.append((sandbox['OBJDIR'], libname, final_lib))
            passthru.variables['FORCE_STATIC_LIB'] = True

        # While there are multiple test manifests, the behavior is very similar
        # across them. We enforce this by having common handling of all
        # manifests and outputting a single class type with the differences
        # described inside the instance.
        #
        # Keys are variable prefixes and values are tuples describing how these
        # manifests should be handled:
        #
        #    (flavor, install_root, install_subdir, active)
        #
        # flavor identifies the flavor of this test.
        # install_root is the path prefix under the tests directory where
        #     files for this kind of manifest are installed.
        # install_subdir is the subdirectory under install_root where the
        #     files for each manifest go.
        # active indicates whether to filter out inactive tests from the
        #     manifest.
        #
        # We ideally don't filter out inactive tests. However, not every test
        # harness can yet deal with test filtering. Once all harnesses can do
        # this, this feature can be dropped.
        test_manifests = dict(
            A11Y=('a11y', 'testing/mochitest', 'a11y', True),
            BROWSER_CHROME=('browser-chrome', 'testing/mochitest', 'browser', True),
            METRO_CHROME=('metro-chrome', 'testing/mochitest', 'metro', True),
            MOCHITEST=('mochitest', 'testing/mochitest', 'tests', True),
            MOCHITEST_CHROME=('chrome', 'testing/mochitest', 'chrome', True),
            MOCHITEST_WEBAPPRT_CHROME=('webapprt-chrome', 'testing/mochitest', 'webapprtChrome', True),
            WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', '.', True),
            XPCSHELL_TESTS=('xpcshell', 'xpcshell', '.', False),
        )

        for prefix, info in test_manifests.items():
            for path in sandbox.get('%s_MANIFESTS' % prefix, []):
                for obj in self._process_test_manifest(sandbox, info, path):
                    yield obj

        jar_manifests = sandbox.get('JAR_MANIFESTS', [])
        if len(jar_manifests) > 1:
            raise SandboxValidationError('While JAR_MANIFESTS is a list, '
                'it is currently limited to one value.')

        for path in jar_manifests:
            yield JARManifest(sandbox, mozpath.join(sandbox['SRCDIR'], path))

        # Temporary test to look for jar.mn files that creep in without using
        # the new declaration. Before, we didn't require jar.mn files to
        # be declared anywhere (they were discovered). This will detect people
        # relying on the old behavior.
        if os.path.exists(os.path.join(sandbox['SRCDIR'], 'jar.mn')):
            if 'jar.mn' not in jar_manifests:
                raise SandboxValidationError('A jar.mn exists in %s but it '
                    'is not referenced in the corresponding moz.build file. '
                    'Please define JAR_MANIFESTS in the moz.build file.' %
                    sandbox['SRCDIR'])

        for name, jar in sandbox.get('JAVA_JAR_TARGETS', {}).items():
            yield SandboxWrapped(sandbox, jar)

        for name, data in sandbox.get('ANDROID_ECLIPSE_PROJECT_TARGETS', {}).items():
            yield SandboxWrapped(sandbox, data)

        if passthru.variables:
            yield passthru
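One detail worth isolating from this version: when FINAL_LIBRARY is set but LIBRARY_NAME is not, the emitter synthesizes a library name from the relative source directory. A standalone sketch of that fallback; the helper name and directories are invented:

def default_library_name(relativedir, libname=None):
    # Mirror the emitter's fallback: derive a name from the directory
    # when none was given, e.g. 'media/libpng' -> 'media_libpng'.
    return libname or relativedir.replace('/', '_')

print(default_library_name('media/libpng'))     # -> media_libpng
print(default_library_name('gfx/2d', 'moz2d'))  # -> moz2d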
Example #38
def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
                      sandbox_vars, mozilla_flags):
    # Translates a json gn config into attributes that can be used to write out
    # moz.build files for this configuration.

    # Much of this code is based on similar functionality in `gyp_reader.py`.

    mozbuild_attrs = {'mozbuild_args': gn_config.get('mozbuild_args', None),
                      'dirs': {}}

    targets = gn_config["targets"]

    project_relsrcdir = mozpath.relpath(srcdir, config.topsrcdir)

    def target_info(fullname):
        path, name = fullname.split(':')
        # Stripping '//' gives us a path relative to the project root,
        # adding a suffix avoids name collisions with libraries already
        # in the tree (like "webrtc").
        return path.lstrip('//'), name + '_gn'

    # Process all targets from the given gn project and its dependencies.
    for target_fullname, spec in targets.iteritems():

        target_path, target_name = target_info(target_fullname)
        context_attrs = {}

        # Remove leading 'lib' from the target_name if any, and use as
        # library name.
        name = target_name
        if spec['type'] in ('static_library', 'shared_library', 'source_set'):
            if name.startswith('lib'):
                name = name[3:]
            context_attrs['LIBRARY_NAME'] = name.decode('utf-8')
        else:
            raise Exception('The following GN target type is not currently '
                            'consumed by moz.build: "%s". It may need to be '
                            'added, or you may need to re-run the '
                            '`GnConfigGen` step.' % spec['type'])

        if spec['type'] == 'shared_library':
            context_attrs['FORCE_SHARED_LIB'] = True

        sources = []
        unified_sources = []
        extensions = set()
        use_defines_in_asflags = False

        for f in spec.get('sources', []):
            f = f.lstrip("//")
            ext = mozpath.splitext(f)[-1]
            extensions.add(ext)
            src = '%s/%s' % (project_relsrcdir, f)
            if ext == '.h':
                continue
            elif ext == '.def':
                context_attrs['SYMBOLS_FILE'] = src
            elif ext != '.S' and src not in non_unified_sources:
                unified_sources.append('/%s' % src)
            else:
                sources.append('/%s' % src)
            # The Mozilla build system doesn't use DEFINES for building
            # ASFILES.
            if ext == '.s':
                use_defines_in_asflags = True

        context_attrs['SOURCES'] = sources
        context_attrs['UNIFIED_SOURCES'] = unified_sources

        context_attrs['DEFINES'] = {}
        for define in spec.get('defines', []):
            if '=' in define:
                name, value = define.split('=', 1)
                context_attrs['DEFINES'][name] = value
            else:
                context_attrs['DEFINES'][define] = True

        context_attrs['LOCAL_INCLUDES'] = []
        for include in spec.get('include_dirs', []):
            # GN will have resolved all these paths relative to the root of
            # the project indicated by "//".
            if include.startswith('//'):
                include = include[2:]
            # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
            if include.startswith('/'):
                resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
            else:
                resolved = mozpath.abspath(mozpath.join(srcdir, include))
            if not os.path.exists(resolved):
                # GN files may refer to include dirs that are outside of the
                # tree or we simply didn't vendor. Print a warning in this case.
                if not resolved.endswith('gn-output/gen'):
                    print("Included path: '%s' does not exist, dropping include from GN "
                          "configuration." % resolved, file=sys.stderr)
                continue
            if not include.startswith('/'):
                include = '/%s/%s' % (project_relsrcdir, include)
            context_attrs['LOCAL_INCLUDES'] += [include]

        context_attrs['ASFLAGS'] = spec.get('asflags_mozilla', [])
        if use_defines_in_asflags and spec.get('defines'):
            context_attrs['ASFLAGS'] += ['-D' + d for d in spec.get('defines', [])]
        flags = [f for f in spec.get('cflags', []) if f in mozilla_flags]
        if flags:
            suffix_map = {
                '.c': 'CFLAGS',
                '.cpp': 'CXXFLAGS',
                '.cc': 'CXXFLAGS',
                '.m': 'CMFLAGS',
                '.mm': 'CMMFLAGS',
            }
            variables = (suffix_map[e] for e in extensions if e in suffix_map)
            for var in variables:
                for f in flags:
                    # We may be getting make variable references out of the
                    # gn data, and we don't want those in emitted data, so
                    # substitute them with their actual value.
                    f = expand_variables(f, config.substs).split()
                    if not f:
                        continue
                    # expand_variables returns a string, so after split() we
                    # always have a list of individual flags.
                    context_attrs.setdefault(var, []).extend(f)

        context_attrs['OS_LIBS'] = []
        for lib in spec.get('libs', []):
            lib_name = os.path.splitext(lib)[0]
            if lib.endswith('.framework'):
                context_attrs['OS_LIBS'] += ['-framework ' + lib_name]
            else:
                context_attrs['OS_LIBS'] += [lib_name]

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context_attrs['LOCAL_INCLUDES'] += [
            '!/ipc/ipdl/_ipdlheaders',
            '/ipc/chromium/src',
            '/ipc/glue',
        ]
        # These get set via VC project file settings for normal GYP builds.
        # TODO: Determine if these defines are needed for GN builds.
        if gn_config['mozbuild_args']['OS_TARGET'] == 'WINNT':
            context_attrs['DEFINES']['UNICODE'] = True
            context_attrs['DEFINES']['_UNICODE'] = True

        context_attrs['COMPILE_FLAGS'] = {
            'STL': [],
            'OS_INCLUDES': [],
        }

        for key, value in sandbox_vars.items():
            if context_attrs.get(key) and isinstance(context_attrs[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context_attrs[key] = value + context_attrs[key]
            elif context_attrs.get(key) and isinstance(context_attrs[key], dict):
                context_attrs[key].update(value)
            else:
                context_attrs[key] = value

        target_relsrcdir = mozpath.join(project_relsrcdir, target_path, target_name)
        mozbuild_attrs['dirs'][target_relsrcdir] = context_attrs

    return mozbuild_attrs
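A hedged sketch of driving process_gn_config: the single GN target below is invented, and srcdir, config, and output stand in for real values from the build environment (the code above is Python 2, per its use of iteritems and str.decode):

def summarize_gn_targets(config, srcdir, output):
    # Minimal invented GN json payload with one static library target.
    gn_config = {
        'mozbuild_args': {'OS_TARGET': 'Linux'},
        'targets': {
            '//audio/utility:audio_frame_operations': {
                'type': 'static_library',
                'sources': ['//audio/utility/audio_frame_operations.cc'],
                'defines': ['WEBRTC_POSIX'],
            },
        },
    }
    attrs = process_gn_config(gn_config, srcdir, config, output,
                              non_unified_sources=set(), sandbox_vars={},
                              mozilla_flags=[])
    for reldir, context_attrs in attrs['dirs'].items():
        print(reldir, context_attrs['LIBRARY_NAME'])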
Example #39
    def emit_from_context(self, context):
        """Convert a Context to tree metadata objects.

        This is a generator of mozbuild.frontend.data.ContextDerived instances.
        """

        # We only want to emit an InstallationTarget if one of the consulted
        # variables is defined. Later on, we look up FINAL_TARGET, which has
        # the side-effect of populating it. So, we need to do this lookup
        # early.
        if any(k in context for k in ('FINAL_TARGET', 'XPI_NAME', 'DIST_SUBDIR')):
            yield InstallationTarget(context)

        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_context(context):
            yield o

        for path in context['CONFIGURE_SUBST_FILES']:
            yield self._create_substitution(ConfigFileSubstitution, context,
                path)

        for path in context['CONFIGURE_DEFINE_FILES']:
            yield self._create_substitution(HeaderFileSubstitution, context,
                path)

        # XPIDL source files get processed and turned into .h and .xpt files.
        # If there are multiple XPIDL files in a directory, they get linked
        # together into a final .xpt, which has the name defined by
        # XPIDL_MODULE.
        xpidl_module = context['XPIDL_MODULE']

        if context['XPIDL_SOURCES'] and not xpidl_module:
            raise SandboxValidationError('XPIDL_MODULE must be defined if '
                'XPIDL_SOURCES is defined.', context)

        if xpidl_module and not context['XPIDL_SOURCES']:
            raise SandboxValidationError('XPIDL_MODULE cannot be defined '
                'unless there are XPIDL_SOURCES', context)

        if context['XPIDL_SOURCES'] and context['NO_DIST_INSTALL']:
            self.log(logging.WARN, 'mozbuild_warning', dict(
                path=context.main_path),
                '{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.')

        for idl in context['XPIDL_SOURCES']:
            yield XPIDLFile(context, mozpath.join(context.srcdir, idl),
                xpidl_module)

        for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
            for src in (context[symbol] or []):
                if not os.path.exists(mozpath.join(context.srcdir, src)):
                    raise SandboxValidationError('File listed in %s does not '
                        'exist: \'%s\'' % (symbol, src), context)

        # Proxy some variables as-is until we have richer classes to represent
        # them. We should aim to keep this set small because it violates the
        # desired abstraction of the build definition away from makefiles.
        passthru = VariablePassthru(context)
        varlist = [
            'ANDROID_GENERATED_RESFILES',
            'ANDROID_RES_DIRS',
            'DISABLE_STL_WRAPPING',
            'EXTRA_ASSEMBLER_FLAGS',
            'EXTRA_COMPILE_FLAGS',
            'EXTRA_COMPONENTS',
            'EXTRA_DSO_LDOPTS',
            'EXTRA_PP_COMPONENTS',
            'FAIL_ON_WARNINGS',
            'USE_STATIC_LIBS',
            'IS_GYP_DIR',
            'MSVC_ENABLE_PGO',
            'NO_DIST_INSTALL',
            'PYTHON_UNIT_TESTS',
            'RCFILE',
            'RESFILE',
            'RCINCLUDE',
            'DEFFILE',
            'WIN32_EXE_LDFLAGS',
            'LD_VERSION_SCRIPT',
        ]
        for v in varlist:
            if v in context and context[v]:
                passthru.variables[v] = context[v]

        if context.config.substs.get('OS_TARGET') == 'WINNT' and \
                context['DELAYLOAD_DLLS']:
            context['LDFLAGS'].extend([('-DELAYLOAD:%s' % dll)
                for dll in context['DELAYLOAD_DLLS']])
            context['OS_LIBS'].append('delayimp')

        for v in ['CFLAGS', 'CXXFLAGS', 'CMFLAGS', 'CMMFLAGS', 'LDFLAGS']:
            if v in context and context[v]:
                passthru.variables['MOZBUILD_' + v] = context[v]

        # NO_VISIBILITY_FLAGS is slightly different
        if context['NO_VISIBILITY_FLAGS']:
            passthru.variables['VISIBILITY_FLAGS'] = ''

        no_pgo = context.get('NO_PGO')
        sources = context.get('SOURCES', [])
        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
        if no_pgo:
            if no_pgo_sources:
                raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
                    'cannot be set at the same time', context)
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
        if no_pgo_sources:
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources

        # A map from "canonical suffixes" for a particular source file
        # language to the range of suffixes associated with that language.
        #
        # We deliberately don't list the canonical suffix in the suffix list
        # in the definition; we'll add it in programmatically after defining
        # things.
        suffix_map = {
            '.s': set(['.asm']),
            '.c': set(),
            '.m': set(),
            '.mm': set(),
            '.cpp': set(['.cc', '.cxx']),
            '.S': set(),
        }

        # The inverse of the above, mapping suffixes to their canonical suffix.
        canonicalized_suffix_map = {}
        for suffix, alternatives in suffix_map.iteritems():
            alternatives.add(suffix)
            for a in alternatives:
                canonicalized_suffix_map[a] = suffix

        def canonical_suffix_for_file(f):
            return canonicalized_suffix_map[mozpath.splitext(f)[1]]

        # A map from moz.build variables to the canonical suffixes of file
        # kinds that can be listed therein.
        all_suffixes = list(suffix_map.keys())
        varmap = dict(
            SOURCES=(Sources, all_suffixes),
            HOST_SOURCES=(HostSources, ['.c', '.mm', '.cpp']),
            UNIFIED_SOURCES=(UnifiedSources, ['.c', '.mm', '.cpp']),
            GENERATED_SOURCES=(GeneratedSources, all_suffixes),
        )

        for variable, (klass, suffixes) in varmap.items():
            allowed_suffixes = set().union(*[suffix_map[s] for s in suffixes])

            # First ensure that we haven't been given filetypes that we don't
            # recognize.
            for f in context[variable]:
                ext = mozpath.splitext(f)[1]
                if ext not in allowed_suffixes:
                    raise SandboxValidationError(
                        '%s has an unknown file type.' % f, context)
                if variable.startswith('GENERATED_'):
                    l = passthru.variables.setdefault('GARBAGE', [])
                    l.append(f)

            # Now sort the files to let groupby work.
            sorted_files = sorted(context[variable], key=canonical_suffix_for_file)
            for canonical_suffix, files in itertools.groupby(sorted_files, canonical_suffix_for_file):
                arglist = [context, list(files), canonical_suffix]
                if variable.startswith('UNIFIED_') and 'FILES_PER_UNIFIED_FILE' in context:
                    arglist.append(context['FILES_PER_UNIFIED_FILE'])
                yield klass(*arglist)

        sources_with_flags = [f for f in sources if sources[f].flags]
        for f in sources_with_flags:
            yield PerSourceFlag(context, f, sources[f].flags)

        exports = context.get('EXPORTS')
        if exports:
            yield Exports(context, exports,
                dist_install=not context.get('NO_DIST_INSTALL', False))

        generated_files = context.get('GENERATED_FILES')
        if generated_files:
            for f in generated_files:
                flags = generated_files[f]
                output = f
                if flags.script:
                    script = mozpath.join(context.srcdir, flags.script)
                    inputs = [mozpath.join(context.srcdir, i) for i in flags.inputs]

                    if not os.path.exists(script):
                        raise SandboxValidationError(
                            'Script for generating %s does not exist: %s'
                            % (f, script), context)
                    if os.path.splitext(script)[1] != '.py':
                        raise SandboxValidationError(
                            'Script for generating %s does not end in .py: %s'
                            % (f, script), context)
                    for i in inputs:
                        if not os.path.exists(i):
                            raise SandboxValidationError(
                                'Input for generating %s does not exist: %s'
                                % (f, i), context)
                else:
                    script = None
                    inputs = []
                yield GeneratedFile(context, script, output, inputs)

        test_harness_files = context.get('TEST_HARNESS_FILES')
        if test_harness_files:
            srcdir_files = defaultdict(list)
            srcdir_pattern_files = defaultdict(list)
            objdir_files = defaultdict(list)

            for path, strings in test_harness_files.walk():
                if not path and strings:
                    raise SandboxValidationError(
                        'Cannot install files to the root of TEST_HARNESS_FILES', context)

                for s in strings:
                    if context.is_objdir_path(s):
                        if s.startswith('!/'):
                            objdir_files[path].append('$(DEPTH)/%s' % s[2:])
                        else:
                            objdir_files[path].append(s[1:])
                    else:
                        resolved = context.resolve_path(s)
                        if '*' in s:
                            srcdir_pattern_files[path].append(s)
                        elif not os.path.exists(resolved):
                            raise SandboxValidationError(
                                'File listed in TEST_HARNESS_FILES does not exist: %s' % s, context)
                        else:
                            srcdir_files[path].append(resolved)

            yield TestHarnessFiles(context, srcdir_files,
                                   srcdir_pattern_files, objdir_files)

        defines = context.get('DEFINES')
        if defines:
            yield Defines(context, defines)

        resources = context.get('RESOURCE_FILES')
        if resources:
            yield Resources(context, resources, defines)

        for pref in sorted(context['JS_PREFERENCE_FILES']):
            yield JsPreferenceFile(context, pref)

        for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]:
            program = context.get(kind)
            if program:
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" as %s name, '
                        'because it is already used in %s' % (program, kind,
                        self._binaries[program].relativedir), context)
                self._binaries[program] = cls(context, program)
                self._linkage.append((context, self._binaries[program],
                    kind.replace('PROGRAM', 'USE_LIBS')))

        for kind, cls in [
                ('SIMPLE_PROGRAMS', SimpleProgram),
                ('CPP_UNIT_TESTS', SimpleProgram),
                ('HOST_SIMPLE_PROGRAMS', HostSimpleProgram)]:
            for program in context[kind]:
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" in %s, '
                        'because it is already used in %s' % (program, kind,
                        self._binaries[program].relativedir), context)
                self._binaries[program] = cls(context, program,
                    is_unit_test=kind == 'CPP_UNIT_TESTS')
                self._linkage.append((context, self._binaries[program],
                    'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
                    else 'USE_LIBS'))

        extra_js_modules = context.get('EXTRA_JS_MODULES')
        if extra_js_modules:
            yield JavaScriptModules(context, extra_js_modules, 'extra')

        extra_pp_js_modules = context.get('EXTRA_PP_JS_MODULES')
        if extra_pp_js_modules:
            yield JavaScriptModules(context, extra_pp_js_modules, 'extra_pp')

        test_js_modules = context.get('TESTING_JS_MODULES')
        if test_js_modules:
            yield JavaScriptModules(context, test_js_modules, 'testing')

        simple_lists = [
            ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile),
            ('GENERATED_WEBIDL_FILES', GeneratedWebIDLFile),
            ('IPDL_SOURCES', IPDLFile),
            ('GENERATED_INCLUDES', GeneratedInclude),
            ('PREPROCESSED_TEST_WEBIDL_FILES', PreprocessedTestWebIDLFile),
            ('PREPROCESSED_WEBIDL_FILES', PreprocessedWebIDLFile),
            ('TEST_WEBIDL_FILES', TestWebIDLFile),
            ('WEBIDL_FILES', WebIDLFile),
            ('WEBIDL_EXAMPLE_INTERFACES', ExampleWebIDLInterface),
        ]
        for context_var, klass in simple_lists:
            for name in context.get(context_var, []):
                yield klass(context, name)

        for local_include in context.get('LOCAL_INCLUDES', []):
            if local_include.startswith('/'):
                path = context.config.topsrcdir
                relative_include = local_include[1:]
            else:
                path = context.srcdir
                relative_include = local_include

            actual_include = os.path.join(path, relative_include)
            if not os.path.exists(actual_include):
                raise SandboxValidationError('Path specified in LOCAL_INCLUDES '
                    'does not exist: %s (resolved to %s)' % (local_include, actual_include), context)
            yield LocalInclude(context, local_include)

        final_target_files = context.get('FINAL_TARGET_FILES')
        if final_target_files:
            yield FinalTargetFiles(context, final_target_files)

        host_libname = context.get('HOST_LIBRARY_NAME')
        libname = context.get('LIBRARY_NAME')

        if host_libname:
            if host_libname == libname:
                raise SandboxValidationError('LIBRARY_NAME and '
                    'HOST_LIBRARY_NAME must have a different value', context)
            lib = HostLibrary(context, host_libname)
            self._libs[host_libname].append(lib)
            self._linkage.append((context, lib, 'HOST_USE_LIBS'))

        final_lib = context.get('FINAL_LIBRARY')
        if not libname and final_lib:
            # If no LIBRARY_NAME is given, create one.
            libname = context.relsrcdir.replace('/', '_')

        static_lib = context.get('FORCE_STATIC_LIB')
        shared_lib = context.get('FORCE_SHARED_LIB')

        static_name = context.get('STATIC_LIBRARY_NAME')
        shared_name = context.get('SHARED_LIBRARY_NAME')

        is_framework = context.get('IS_FRAMEWORK')
        is_component = context.get('IS_COMPONENT')

        soname = context.get('SONAME')

        lib_defines = context.get('LIBRARY_DEFINES')

        shared_args = {}
        static_args = {}

        if final_lib:
            if static_lib:
                raise SandboxValidationError(
                    'FINAL_LIBRARY implies FORCE_STATIC_LIB. '
                    'Please remove the latter.', context)
            if shared_lib:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. '
                    'Please remove one.', context)
            if is_framework:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with IS_FRAMEWORK. '
                    'Please remove one.', context)
            if is_component:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with IS_COMPONENT. '
                    'Please remove one.', context)
            static_args['link_into'] = final_lib
            static_lib = True

        if libname:
            if is_component:
                if static_lib:
                    raise SandboxValidationError(
                        'IS_COMPONENT conflicts with FORCE_STATIC_LIB. '
                        'Please remove one.', context)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.COMPONENT

            if is_framework:
                if soname:
                    raise SandboxValidationError(
                        'IS_FRAMEWORK conflicts with SONAME. '
                        'Please remove one.', context)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.FRAMEWORK

            if not static_lib and not shared_lib:
                static_lib = True

            if static_name:
                if not static_lib:
                    raise SandboxValidationError(
                        'STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB',
                        context)
                static_args['real_name'] = static_name

            if shared_name:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB',
                        context)
                shared_args['real_name'] = shared_name

            if soname:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SONAME requires FORCE_SHARED_LIB', context)
                shared_args['soname'] = soname

            # If both a shared and a static library are created, only the
            # shared library is meant to be an SDK library.
            if context.get('SDK_LIBRARY'):
                if shared_lib:
                    shared_args['is_sdk'] = True
                elif static_lib:
                    static_args['is_sdk'] = True

            if shared_lib and static_lib:
                if not static_name and not shared_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but neither STATIC_LIBRARY_NAME nor '
                        'SHARED_LIBRARY_NAME is set. At least one is required.',
                        context)
                if static_name and not shared_name and static_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, '
                        'and SHARED_LIBRARY_NAME is unset. Please either '
                        'change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set '
                        'SHARED_LIBRARY_NAME.', context)
                if shared_name and not static_name and shared_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, '
                        'and STATIC_LIBRARY_NAME is unset. Please either '
                        'change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set '
                        'STATIC_LIBRARY_NAME.', context)
                if shared_name and static_name and shared_name == static_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but SHARED_LIBRARY_NAME is the same as '
                        'STATIC_LIBRARY_NAME. Please change one of them.',
                        context)

            if shared_lib:
                lib = SharedLibrary(context, libname, **shared_args)
                self._libs[libname].append(lib)
                self._linkage.append((context, lib, 'USE_LIBS'))
            if static_lib:
                lib = StaticLibrary(context, libname, **static_args)
                self._libs[libname].append(lib)
                self._linkage.append((context, lib, 'USE_LIBS'))

        if lib_defines:
            if not libname:
                raise SandboxValidationError('LIBRARY_DEFINES needs a '
                    'LIBRARY_NAME to take effect', context)
            lib.defines.update(lib_defines)

        # While there are multiple test manifests, the behavior is very similar
        # across them. We enforce this by having common handling of all
        # manifests and outputting a single class type with the differences
        # described inside the instance.
        #
        # Keys are variable prefixes and values are tuples describing how these
        # manifests should be handled:
        #
        #    (flavor, install_root, install_subdir, package_tests)
        #
        # flavor identifies the flavor of this test.
        # install_root is the path prefix to install the files, starting from
        #     the root of the test package directory.
        # install_subdir is the path within install_root where the files for
        #     a given manifest are installed.
        # package_tests indicates whether to package test files into the test
        #     package; suites that compile the test files should not install
        #     them into the test package.
        #
        test_manifests = dict(
            A11Y=('a11y', 'testing/mochitest', 'a11y', True),
            BROWSER_CHROME=('browser-chrome', 'testing/mochitest', 'browser', True),
            ANDROID_INSTRUMENTATION=('instrumentation', 'instrumentation', '.', False),
            JETPACK_PACKAGE=('jetpack-package', 'testing/mochitest', 'jetpack-package', True),
            JETPACK_ADDON=('jetpack-addon', 'testing/mochitest', 'jetpack-addon', False),
            METRO_CHROME=('metro-chrome', 'testing/mochitest', 'metro', True),
            MOCHITEST=('mochitest', 'testing/mochitest', 'tests', True),
            MOCHITEST_CHROME=('chrome', 'testing/mochitest', 'chrome', True),
            MOCHITEST_WEBAPPRT_CONTENT=('webapprt-content', 'testing/mochitest', 'webapprtContent', True),
            MOCHITEST_WEBAPPRT_CHROME=('webapprt-chrome', 'testing/mochitest', 'webapprtChrome', True),
            WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', '.', True),
            XPCSHELL_TESTS=('xpcshell', 'xpcshell', '.', True),
        )

        for prefix, info in test_manifests.items():
            for path in context.get('%s_MANIFESTS' % prefix, []):
                for obj in self._process_test_manifest(context, info, path):
                    yield obj

        for flavor in ('crashtest', 'reftest'):
            for path in context.get('%s_MANIFESTS' % flavor.upper(), []):
                for obj in self._process_reftest_manifest(context, flavor, path):
                    yield obj

        jar_manifests = context.get('JAR_MANIFESTS', [])
        if len(jar_manifests) > 1:
            raise SandboxValidationError('While JAR_MANIFESTS is a list, '
                'it is currently limited to one value.', context)

        for path in jar_manifests:
            yield JARManifest(context, mozpath.join(context.srcdir, path))

        # Temporary test to look for jar.mn files that creep in without using
        # the new declaration. Before, we didn't require jar.mn files to be
        # declared anywhere (they were discovered). This will detect people
        # relying on the old behavior.
        if os.path.exists(os.path.join(context.srcdir, 'jar.mn')):
            if 'jar.mn' not in jar_manifests:
                raise SandboxValidationError('A jar.mn exists but it '
                    'is not referenced in the moz.build file. '
                    'Please define JAR_MANIFESTS.', context)

        for name, jar in context.get('JAVA_JAR_TARGETS', {}).items():
            yield ContextWrapped(context, jar)

        for name, data in context.get('ANDROID_ECLIPSE_PROJECT_TARGETS', {}).items():
            yield ContextWrapped(context, data)

        if passthru.variables:
            yield passthru
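
# A minimal standalone sketch of the LOCAL_INCLUDES resolution in the excerpt
# above: paths starting with '/' resolve against topsrcdir, everything else
# against the current srcdir. Pure os.path; the directories are made up and
# the asserts assume POSIX separators.
import os

def resolve_local_include(local_include, topsrcdir, srcdir):
    if local_include.startswith('/'):
        return os.path.join(topsrcdir, local_include[1:])
    return os.path.join(srcdir, local_include)

assert resolve_local_include('/ipc/glue', '/src', '/src/dom') == '/src/ipc/glue'
assert resolve_local_include('base', '/src', '/src/dom') == '/src/dom/base'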
Example #40
def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
    """Read a gyp configuration and emits GypContexts for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp configuration
    file, output is the base path under which the objdir for the various gyp
    dependencies will be, and vars a dict of variables to pass to the gyp
    processor.
    """

    time_start = time.time()

    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())

    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
    }

    # Files that gyp_chromium always includes
    includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
    finder = FileFinder(chrome_src, find_executables=False)
    includes.extend(
        encode(mozpath.join(chrome_src, name))
        for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
            default_variables=str_vars,
            includes=includes,
            depth=encode(mozpath.dirname(path)),
            params=params)

    # Process all targets from the given gyp file and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(
            config,
            mozpath.relpath(mozpath.join(output, reldir, subdir),
                            config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp is relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(
                mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if spec['type'] == 'none':
            continue
        elif spec['type'] == 'static_library':
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if name.startswith('lib'):
                name = name[3:]
            # The context expects a unicode string.
            context['LIBRARY_NAME'] = name.decode('utf-8')
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext != '.S' and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            for define in target_conf.get('defines', []):
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            for include in target_conf.get('include_dirs', []):
                # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                #
                # NB: gyp files sometimes have actual absolute paths (e.g.
                # /usr/include32) and sometimes paths that moz.build considers
                # absolute, i.e. starting from topsrcdir. There's no good way
                # to tell them apart here, and the actual absolute paths are
                # likely bogus. In any event, actual absolute paths will be
                # filtered out by trying to find them in topsrcdir.
                if include.startswith('/'):
                    resolved = mozpath.abspath(
                        mozpath.join(config.topsrcdir, include[1:]))
                else:
                    resolved = mozpath.abspath(
                        mozpath.join(mozpath.dirname(build_file), include))
                if not os.path.exists(resolved):
                    continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (suffix_map[e] for e in extensions
                             if e in suffix_map)
                for var in variables:
                    context[var].extend(flags)
        else:
            # Ignore types other than static_library because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context['LOCAL_INCLUDES'] += [
            '/ipc/chromium/src',
            '/ipc/glue',
        ]
        context['GENERATED_INCLUDES'] += ['/ipc/ipdl/_ipdlheaders']
        # These get set via VC project file settings for normal GYP builds.
        if config.substs['OS_TARGET'] == 'WINNT':
            context['DEFINES']['UNICODE'] = True
            context['DEFINES']['_UNICODE'] = True
        context['DISABLE_STL_WRAPPING'] = True

        context.execution_time = time.time() - time_start
        yield context
        time_start = time.time()
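
# A minimal standalone sketch of the per-target objdir naming above: the
# subdirectory name combines the build file's stem with the target name.
# Assumes mozpack is importable; the path and target name are made up.
import mozpack.path as mozpath

build_file = 'media/webrtc/trunk/webrtc/base/base.gyp'
subdir = '%s_%s' % (mozpath.splitext(mozpath.basename(build_file))[0],
                    'rtc_base')
assert subdir == 'base_rtc_base'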
Example #41
File: data.py Project: dati91/gecko-dev
def source_files(self):
    for srcs in self.sources.values():
        for f in srcs:
            if mozpath.basename(mozpath.splitext(f)[0]) == mozpath.splitext(self.program)[0]:
                return [f]
    return []
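
# A standalone sketch of the matching rule above: a source file belongs to the
# program when its basename stem equals the program's stem. Assumes mozpack is
# importable; the names are made up.
import mozpack.path as mozpath

program = 'TestProgram.exe'
source = 'src/TestProgram.cpp'
assert (mozpath.basename(mozpath.splitext(source)[0]) ==
        mozpath.splitext(program)[0])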
Example #42
def read_from_gyp(config,
                  path,
                  output,
                  vars,
                  no_chromium,
                  no_unified,
                  action_overrides,
                  non_unified_sources=set()):
    """Read a gyp configuration and emits GypContexts for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp configuration
    file, output is the base path under which the objdir for the various gyp
    dependencies will be, and vars a dict of variables to pass to the gyp
    processor.
    """

    is_win = config.substs['OS_TARGET'] == 'WINNT'
    is_msvc = bool(config.substs['_MSC_VER'])
    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())
    if is_msvc:
        # This isn't actually used anywhere in this generator, but it's needed
        # to override the registry detection of VC++ in gyp.
        os.environ['GYP_MSVS_OVERRIDE_PATH'] = 'fake_path'
        os.environ['GYP_MSVS_VERSION'] = config.substs['MSVS_VERSION']

    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
        b'root_targets': None,
    }

    if no_chromium:
        includes = []
        depth = mozpath.dirname(path)
    else:
        depth = chrome_src
        # Files that gyp_chromium always includes
        includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
        finder = FileFinder(chrome_src, find_executables=False)
        includes.extend(
            encode(mozpath.join(chrome_src, name))
            for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
            default_variables=str_vars,
            includes=includes,
            depth=encode(depth),
            params=params)

    # Process all targets from the given gyp file and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(
            config,
            mozpath.relpath(mozpath.join(output, reldir, subdir),
                            config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp is relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(
                mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if 'actions' in spec:
            handle_actions(spec['actions'], context, action_overrides)
        if 'copies' in spec:
            handle_copies(spec['copies'], context)

        use_libs = []
        libs = []

        def add_deps(s):
            for t in s.get('dependencies', []) + s.get('dependencies_original',
                                                       []):
                ty = targets[t]['type']
                if ty in ('static_library', 'shared_library'):
                    use_libs.append(targets[t]['target_name'])
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ('static_library', 'none'):
                    add_deps(targets[t])
            libs.extend(spec.get('libraries', []))

        #XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        os_libs = []
        for l in libs:
            if l.startswith('-'):
                os_libs.append(l)
            elif l.endswith('.lib'):
                os_libs.append(l[:-4])
            elif l:
                # For library names passed in from moz.build.
                use_libs.append(os.path.basename(l))

        if spec['type'] == 'none':
            if not ('actions' in spec or 'copies' in spec):
                continue
        elif spec['type'] in ('static_library', 'shared_library',
                              'executable'):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if spec['type'] in ('static_library', 'shared_library'):
                if name.startswith('lib'):
                    name = name[3:]
                # The context expects a unicode string.
                context['LIBRARY_NAME'] = name.decode('utf-8')
            else:
                context['PROGRAM'] = name.decode('utf-8')
            if spec['type'] == 'shared_library':
                context['FORCE_SHARED_LIB'] = True
            elif spec['type'] == 'static_library' and spec.get(
                    'variables', {}).get('no_expand_libs', '0') == '1':
                # PSM links an NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context['NO_EXPAND_LIBS'] = True
            if use_libs:
                context['USE_LIBS'] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context['OS_LIBS'] = os_libs
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith('$INTERMEDIATE_DIR/'):
                    s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/',
                                                      '!'))
                else:
                    s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext == '.def':
                    context['SYMBOLS_FILE'] = s
                elif ext != '.S' and not no_unified and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == '.s':
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            defines = target_conf.get('defines', [])
            if is_msvc and no_chromium:
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            product_dir_dist = '$PRODUCT_DIR/dist/'
            for include in target_conf.get('include_dirs', []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = '!/dist/include/' + include[len(product_dir_dist):]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = '!/' + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    if include.startswith('/'):
                        resolved = mozpath.abspath(
                            mozpath.join(config.topsrcdir, include[1:]))
                    else:
                        resolved = mozpath.abspath(
                            mozpath.join(mozpath.dirname(build_file), include))
                    if not os.path.exists(resolved):
                        continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            if use_defines_in_asflags and defines:
                context['ASFLAGS'] += ['-D' + d for d in defines]
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (suffix_map[e] for e in extensions
                             if e in suffix_map)
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context['LOCAL_INCLUDES'] += [
                '!/ipc/ipdl/_ipdlheaders',
                '/ipc/chromium/src',
                '/ipc/glue',
            ]
            # These get set via VC project file settings for normal GYP builds.
            if is_win:
                context['DEFINES']['UNICODE'] = True
                context['DEFINES']['_UNICODE'] = True
        context['DISABLE_STL_WRAPPING'] = True

        yield context
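
# A standalone restatement of the defines translation above: 'NAME=VALUE'
# entries become name/value pairs and bare names become boolean defines.
def parse_defines(defines):
    result = {}
    for define in defines:
        if '=' in define:
            name, value = define.split('=', 1)
            result[name] = value
        else:
            result[define] = True
    return result

assert parse_defines(['DEBUG', 'VERSION=2']) == {'DEBUG': True, 'VERSION': '2'}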
Example #43
def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
                      sandbox_vars, mozilla_flags):
    # Translates a JSON GN config into attributes that can be used to write out
    # moz.build files for this configuration.

    # Much of this code is based on similar functionality in `gyp_reader.py`.

    mozbuild_attrs = {
        "mozbuild_args": gn_config.get("mozbuild_args", None),
        "dirs": {}
    }

    targets = gn_config["targets"]

    project_relsrcdir = mozpath.relpath(srcdir, config.topsrcdir)

    non_unified_sources = set(
        [mozpath.normpath(s) for s in non_unified_sources])

    def target_info(fullname):
        path, name = target_fullname.split(":")
        # Stripping '//' gives us a path relative to the project root,
        # adding a suffix avoids name collisions with libraries already
        # in the tree (like "webrtc").
        return path.lstrip("//"), name + "_gn"

    # Process all targets from the given gn project and its dependencies.
    for target_fullname, spec in six.iteritems(targets):

        target_path, target_name = target_info(target_fullname)
        context_attrs = {}

        # Remove leading 'lib' from the target_name if any, and use as
        # library name.
        name = target_name
        if spec["type"] in ("static_library", "shared_library", "source_set"):
            if name.startswith("lib"):
                name = name[3:]
            context_attrs["LIBRARY_NAME"] = six.ensure_text(name)
        else:
            raise Exception("The following GN target type is not currently "
                            'consumed by moz.build: "%s". It may need to be '
                            "added, or you may need to re-run the "
                            "`GnConfigGen` step." % spec["type"])

        if spec["type"] == "shared_library":
            context_attrs["FORCE_SHARED_LIB"] = True

        sources = []
        unified_sources = []
        extensions = set()
        use_defines_in_asflags = False

        for f in spec.get("sources", []):
            f = f.lstrip("//")
            ext = mozpath.splitext(f)[-1]
            extensions.add(ext)
            src = "%s/%s" % (project_relsrcdir, f)
            if ext == ".h":
                continue
            elif ext == ".def":
                context_attrs["SYMBOLS_FILE"] = src
            elif ext != ".S" and src not in non_unified_sources:
                unified_sources.append("/%s" % src)
            else:
                sources.append("/%s" % src)
            # The Mozilla build system doesn't use DEFINES for building
            # ASFILES.
            if ext == ".s":
                use_defines_in_asflags = True

        context_attrs["SOURCES"] = sources
        context_attrs["UNIFIED_SOURCES"] = unified_sources

        context_attrs["DEFINES"] = {}
        for define in spec.get("defines", []):
            if "=" in define:
                name, value = define.split("=", 1)
                context_attrs["DEFINES"][name] = value
            else:
                context_attrs["DEFINES"][define] = True

        context_attrs["LOCAL_INCLUDES"] = []
        for include in spec.get("include_dirs", []):
            # GN will have resolved all these paths relative to the root of
            # the project indicated by "//".
            if include.startswith("//"):
                include = include[2:]
            # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
            if include.startswith("/"):
                resolved = mozpath.abspath(
                    mozpath.join(config.topsrcdir, include[1:]))
            else:
                resolved = mozpath.abspath(mozpath.join(srcdir, include))
            if not os.path.exists(resolved):
                # GN files may refer to include dirs that are outside of the
                # tree or that we simply didn't vendor. Print a warning in
                # this case.
                if not resolved.endswith("gn-output/gen"):
                    print(
                        "Included path: '%s' does not exist, dropping include from GN "
                        "configuration." % resolved,
                        file=sys.stderr,
                    )
                continue
            if not include.startswith("/"):
                include = "/%s/%s" % (project_relsrcdir, include)
            context_attrs["LOCAL_INCLUDES"] += [include]

        context_attrs["ASFLAGS"] = spec.get("asflags_mozilla", [])
        if use_defines_in_asflags and context_attrs["DEFINES"]:
            context_attrs["ASFLAGS"] += [
                "-D" + d for d in context_attrs["DEFINES"]
            ]
        flags = [_f for _f in spec.get("cflags", []) if _f in mozilla_flags]
        if flags:
            suffix_map = {
                ".c": "CFLAGS",
                ".cpp": "CXXFLAGS",
                ".cc": "CXXFLAGS",
                ".m": "CMFLAGS",
                ".mm": "CMMFLAGS",
            }
            variables = (suffix_map[e] for e in extensions if e in suffix_map)
            for var in variables:
                for f in flags:
                    # We may be getting make variable references out of the
                    # gn data, and we don't want those in emitted data, so
                    # substitute them with their actual value.
                    f = expand_variables(f, config.substs).split()
                    if not f:
                        continue
                    # the result may be a string or a list.
                    if isinstance(f, six.string_types):
                        context_attrs.setdefault(var, []).append(f)
                    else:
                        context_attrs.setdefault(var, []).extend(f)

        context_attrs["OS_LIBS"] = []
        for lib in spec.get("libs", []):
            lib_name = os.path.splitext(lib)[0]
            if lib.endswith(".framework"):
                context_attrs["OS_LIBS"] += ["-framework " + lib_name]
            else:
                context_attrs["OS_LIBS"] += [lib_name]

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context_attrs["LOCAL_INCLUDES"] += [
            "!/ipc/ipdl/_ipdlheaders",
            "/ipc/chromium/src",
            "/ipc/glue",
            "/tools/profiler/public",
        ]
        # These get set via VC project file settings for normal GYP builds.
        # TODO: Determine if these defines are needed for GN builds.
        if gn_config["mozbuild_args"]["OS_TARGET"] == "WINNT":
            context_attrs["DEFINES"]["UNICODE"] = True
            context_attrs["DEFINES"]["_UNICODE"] = True

        context_attrs["COMPILE_FLAGS"] = {"OS_INCLUDES": []}

        for key, value in sandbox_vars.items():
            if context_attrs.get(key) and isinstance(context_attrs[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context_attrs[key] = value + context_attrs[key]
            elif context_attrs.get(key) and isinstance(context_attrs[key],
                                                       dict):
                context_attrs[key].update(value)
            else:
                context_attrs[key] = value

        target_relsrcdir = mozpath.join(project_relsrcdir, target_path,
                                        target_name)
        mozbuild_attrs["dirs"][target_relsrcdir] = context_attrs

    return mozbuild_attrs
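
# A standalone sketch of the OS_LIBS translation above: '.framework' entries
# become '-framework Foo' flags and other entries lose their extension.
import os

def translate_libs(libs):
    out = []
    for lib in libs:
        lib_name = os.path.splitext(lib)[0]
        if lib.endswith('.framework'):
            out.append('-framework ' + lib_name)
        else:
            out.append(lib_name)
    return out

assert translate_libs(['Cocoa.framework', 'z']) == ['-framework Cocoa', 'z']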
Example #44
File: data.py Project: luke-chang/gecko-1
def generated_events_stems(self):
    return [mozpath.splitext(b)[0] for b in self.generated_events_basenames()]
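
# A one-line illustration of the stem computation above; the basename is made
# up and mozpack is assumed importable.
import mozpack.path as mozpath
assert mozpath.splitext('MouseEvent.webidl')[0] == 'MouseEvent'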
Example #45
File: data.py Project: luke-chang/gecko-1
def all_regular_bindinggen_stems(self):
    for stem in self.all_regular_stems():
        yield '%sBinding' % stem

    for source in self.generated_events_sources:
        yield mozpath.splitext(mozpath.basename(source))[0]
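
# A standalone sketch of the two kinds of stems yielded above, with made-up
# inputs: regular stems get a 'Binding' suffix, while generated-events sources
# are reduced to their basename stem. Assumes mozpack is importable.
import mozpack.path as mozpath

assert '%sBinding' % 'Window' == 'WindowBinding'
assert mozpath.splitext(mozpath.basename('gen/MouseEvent.cpp'))[0] == 'MouseEvent'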
Example #46
def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
                      sandbox_vars, mozilla_flags):
    # Translates a JSON GN config into attributes that can be used to write out
    # moz.build files for this configuration.

    # Much of this code is based on similar functionality in `gyp_reader.py`.

    mozbuild_attrs = {
        'mozbuild_args': gn_config.get('mozbuild_args', None),
        'dirs': {}
    }

    targets = gn_config["targets"]

    project_relsrcdir = mozpath.relpath(srcdir, config.topsrcdir)

    def target_info(fullname):
        path, name = fullname.split(':')
        # Stripping '//' gives us a path relative to the project root,
        # adding a suffix avoids name collisions with libraries already
        # in the tree (like "webrtc").
        return path.lstrip('//'), name + '_gn'

    # Process all targets from the given gn project and its dependencies.
    for target_fullname, spec in targets.iteritems():

        target_path, target_name = target_info(target_fullname)
        context_attrs = {}

        # Remove leading 'lib' from the target_name if any, and use as
        # library name.
        name = target_name
        if spec['type'] in ('static_library', 'shared_library', 'source_set'):
            if name.startswith('lib'):
                name = name[3:]
            context_attrs['LIBRARY_NAME'] = name.decode('utf-8')
        else:
            raise Exception('The following GN target type is not currently '
                            'consumed by moz.build: "%s". It may need to be '
                            'added, or you may need to re-run the '
                            '`GnConfigGen` step.' % spec['type'])

        if spec['type'] == 'shared_library':
            context_attrs['FORCE_SHARED_LIB'] = True

        sources = []
        unified_sources = []
        extensions = set()
        use_defines_in_asflags = False

        for f in spec.get('sources', []):
            f = f.lstrip("//")
            ext = mozpath.splitext(f)[-1]
            extensions.add(ext)
            src = '%s/%s' % (project_relsrcdir, f)
            if ext == '.h':
                continue
            elif ext == '.def':
                context_attrs['SYMBOLS_FILE'] = src
            elif ext != '.S' and src not in non_unified_sources:
                unified_sources.append('/%s' % src)
            else:
                sources.append('/%s' % src)
            # The Mozilla build system doesn't use DEFINES for building
            # ASFILES.
            if ext == '.s':
                use_defines_in_asflags = True

        context_attrs['SOURCES'] = sources
        context_attrs['UNIFIED_SOURCES'] = unified_sources

        context_attrs['DEFINES'] = {}
        for define in spec.get('defines', []):
            if '=' in define:
                name, value = define.split('=', 1)
                context_attrs['DEFINES'][name] = value
            else:
                context_attrs['DEFINES'][define] = True

        context_attrs['LOCAL_INCLUDES'] = []
        for include in spec.get('include_dirs', []):
            # GN will have resolved all these paths relative to the root of
            # the project indicated by "//".
            if include.startswith('//'):
                include = include[2:]
            # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
            if include.startswith('/'):
                resolved = mozpath.abspath(
                    mozpath.join(config.topsrcdir, include[1:]))
            else:
                resolved = mozpath.abspath(mozpath.join(srcdir, include))
            if not os.path.exists(resolved):
                # GN files may refer to include dirs that are outside of the
                # tree or that we simply didn't vendor. Print a warning in
                # this case.
                if not resolved.endswith('gn-output/gen'):
                    print(
                        "Included path: '%s' does not exist, dropping include from GN "
                        "configuration." % resolved,
                        file=sys.stderr)
                continue
            if not include.startswith('/'):
                include = '/%s/%s' % (project_relsrcdir, include)
            context_attrs['LOCAL_INCLUDES'] += [include]

        context_attrs['ASFLAGS'] = spec.get('asflags_mozilla', [])
        if use_defines_in_asflags and context_attrs['DEFINES']:
            context_attrs['ASFLAGS'] += ['-D' + d for d in context_attrs['DEFINES']]
        flags = [f for f in spec.get('cflags', []) if f in mozilla_flags]
        if flags:
            suffix_map = {
                '.c': 'CFLAGS',
                '.cpp': 'CXXFLAGS',
                '.cc': 'CXXFLAGS',
                '.m': 'CMFLAGS',
                '.mm': 'CMMFLAGS',
            }
            variables = (suffix_map[e] for e in extensions if e in suffix_map)
            for var in variables:
                for f in flags:
                    # We may be getting make variable references out of the
                    # gn data, and we don't want those in emitted data, so
                    # substitute them with their actual value.
                    f = expand_variables(f, config.substs).split()
                    if not f:
                        continue
                    # the result may be a string or a list.
                    if isinstance(f, types.StringTypes):
                        context_attrs.setdefault(var, []).append(f)
                    else:
                        context_attrs.setdefault(var, []).extend(f)

        context_attrs['OS_LIBS'] = []
        for lib in spec.get('libs', []):
            lib_name = os.path.splitext(lib)[0]
            if lib.endswith('.framework'):
                context_attrs['OS_LIBS'] += ['-framework ' + lib_name]
            else:
                context_attrs['OS_LIBS'] += [lib_name]

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context_attrs['LOCAL_INCLUDES'] += [
            '!/ipc/ipdl/_ipdlheaders',
            '/ipc/chromium/src',
            '/ipc/glue',
        ]
        # These get set via VC project file settings for normal GYP builds.
        # TODO: Determine if these defines are needed for GN builds.
        if gn_config['mozbuild_args']['OS_TARGET'] == 'WINNT':
            context_attrs['DEFINES']['UNICODE'] = True
            context_attrs['DEFINES']['_UNICODE'] = True

        context_attrs['COMPILE_FLAGS'] = {
            'STL': [],
            'OS_INCLUDES': [],
        }

        for key, value in sandbox_vars.items():
            if context_attrs.get(key) and isinstance(context_attrs[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context_attrs[key] = value + context_attrs[key]
            elif context_attrs.get(key) and isinstance(context_attrs[key],
                                                       dict):
                context_attrs[key].update(value)
            else:
                context_attrs[key] = value

        target_relsrcdir = mozpath.join(project_relsrcdir, target_path,
                                        target_name)
        mozbuild_attrs['dirs'][target_relsrcdir] = context_attrs

    return mozbuild_attrs
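
# A standalone restatement of the sandbox_vars merge above: list values from
# sandbox_vars are prepended, dict values are merged in (with sandbox_vars
# winning on key conflicts), and anything else replaces the computed value.
def merge_attrs(context_attrs, sandbox_vars):
    for key, value in sandbox_vars.items():
        if context_attrs.get(key) and isinstance(context_attrs[key], list):
            context_attrs[key] = value + context_attrs[key]
        elif context_attrs.get(key) and isinstance(context_attrs[key], dict):
            context_attrs[key].update(value)
        else:
            context_attrs[key] = value
    return context_attrs

assert merge_attrs({'DEFINES': {'A': True}}, {'DEFINES': {'B': '1'}}) == \
    {'DEFINES': {'A': True, 'B': '1'}}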
Example #47
def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
                       non_unified_sources, action_overrides):
    flat_list, targets, data = gyp_result
    no_chromium = gyp_dir_attrs.no_chromium
    no_unified = gyp_dir_attrs.no_unified

    # Process all targets from the given gyp file and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(
            config,
            mozpath.relpath(mozpath.join(output, reldir, subdir),
                            config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp is relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(
                mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs.get('MOZ_DEBUG') else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if 'actions' in spec:
            handle_actions(spec['actions'], context, action_overrides)
        if 'copies' in spec:
            handle_copies(spec['copies'], context)

        use_libs = []
        libs = []

        def add_deps(s):
            for t in s.get('dependencies', []) + s.get('dependencies_original',
                                                       []):
                ty = targets[t]['type']
                if ty in ('static_library', 'shared_library'):
                    use_libs.append(targets[t]['target_name'])
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ('static_library', 'none'):
                    add_deps(targets[t])
            libs.extend(spec.get('libraries', []))

        #XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        os_libs = []
        for l in libs:
            if l.startswith('-'):
                os_libs.append(l)
            elif l.endswith('.lib'):
                os_libs.append(l[:-4])
            elif l:
                # For library names passed in from moz.build.
                use_libs.append(os.path.basename(l))

        if spec['type'] == 'none':
            if not ('actions' in spec or 'copies' in spec):
                continue
        elif spec['type'] in ('static_library', 'shared_library',
                              'executable'):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if spec['type'] in ('static_library', 'shared_library'):
                if name.startswith('lib'):
                    name = name[3:]
                # The context expects a unicode string.
                context['LIBRARY_NAME'] = name.decode('utf-8')
            else:
                context['PROGRAM'] = name.decode('utf-8')
            if spec['type'] == 'shared_library':
                context['FORCE_SHARED_LIB'] = True
            elif spec['type'] == 'static_library' and spec.get(
                    'variables', {}).get('no_expand_libs', '0') == '1':
                # PSM links an NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context['NO_EXPAND_LIBS'] = True
            if use_libs:
                context['USE_LIBS'] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context['OS_LIBS'] = os_libs
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith('$INTERMEDIATE_DIR/'):
                    s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/',
                                                      '!'))
                else:
                    s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext == '.def':
                    context['SYMBOLS_FILE'] = s
                elif ext != '.S' and not no_unified and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == '.s':
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            defines = target_conf.get('defines', [])
            if config.substs['CC_TYPE'] == 'clang-cl' and no_chromium:
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if '=' in define:
                    name, value = define.split('=', 1)
                    # The NSS gyp file doesn't expose a way to override this
                    # currently, so we do so here.
                    if name == 'NSS_ALLOW_SSLKEYLOGFILE' and config.substs.get(
                            'RELEASE_OR_BETA', False):
                        continue
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            product_dir_dist = '$PRODUCT_DIR/dist/'
            for include in target_conf.get('include_dirs', []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = '!/dist/include/' + include[len(product_dir_dist):]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = '!/' + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    #
                    # We do allow !- and %-prefixed paths, assuming they come
                    # from moz.build and will be handled the same way as if they
                    # were given to LOCAL_INCLUDES in moz.build.
                    if include.startswith('/'):
                        resolved = mozpath.abspath(
                            mozpath.join(config.topsrcdir, include[1:]))
                    elif not include.startswith(('!', '%')):
                        resolved = mozpath.abspath(
                            mozpath.join(mozpath.dirname(build_file), include))
                    if not include.startswith(
                        ('!', '%')) and not os.path.exists(resolved):
                        continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            if use_defines_in_asflags and defines:
                context['ASFLAGS'] += ['-D' + d for d in defines]
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (suffix_map[e] for e in extensions
                             if e in suffix_map)
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # split() returns a list here, but be defensive in
                        # case a string slips through.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Fail on other target types: nothing uses them yet and they're
            # not tested. Support can be added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context['LOCAL_INCLUDES'] += [
                '!/ipc/ipdl/_ipdlheaders',
                '/ipc/chromium/src',
                '/ipc/glue',
            ]
            # These get set via VC project file settings for normal GYP builds.
            if config.substs['OS_TARGET'] == 'WINNT':
                context['DEFINES']['UNICODE'] = True
                context['DEFINES']['_UNICODE'] = True
        context['COMPILE_FLAGS']['OS_INCLUDES'] = []

        for key, value in gyp_dir_attrs.sandbox_vars.items():
            if context.get(key) and isinstance(context[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here, we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context[key] = value + context[key]
            elif context.get(key) and isinstance(context[key], dict):
                context[key].update(value)
            else:
                context[key] = value

        yield context
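
The sandbox_vars merge at the end of this example follows a three-way policy: list values from sandbox_vars are prepended, dict values are merged in, and anything else overwrites outright, so moz.build-supplied settings take precedence without discarding what the gyp data contributed. A minimal, self-contained sketch of that policy (plain dicts and hypothetical values standing in for the real moz.build Context):

def merge_sandbox_vars(context, sandbox_vars):
    """Merge sandbox_vars into context: prepend lists, update dicts,
    otherwise overwrite."""
    for key, value in sandbox_vars.items():
        if context.get(key) and isinstance(context[key], list):
            context[key] = value + context[key]
        elif context.get(key) and isinstance(context[key], dict):
            context[key].update(value)
        else:
            context[key] = value

ctx = {'LOCAL_INCLUDES': ['/ipc/glue'], 'DEFINES': {'UNICODE': True}}
merge_sandbox_vars(ctx, {'LOCAL_INCLUDES': ['/nsprpub'],
                         'DEFINES': {'DEBUG': True},
                         'PROGRAM': 'demo'})
# ctx is now:
#   {'LOCAL_INCLUDES': ['/nsprpub', '/ipc/glue'],
#    'DEFINES': {'UNICODE': True, 'DEBUG': True},
#    'PROGRAM': 'demo'}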
Example #48
 def add_idls(self, idls):
     self.idl_files.update(idl.full_path for idl in idls)
     self.directories.update(
         mozpath.dirname(idl.full_path) for idl in idls)
     self._stems.update(
         mozpath.splitext(mozpath.basename(idl.full_path))[0] for idl in idls)
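
A usage sketch for add_idls, under stated assumptions: IDL is a hypothetical minimal stand-in for the real IDL object (only a full_path attribute), and posixpath stands in for mozpath, which wraps the same os.path helpers with normalized separators:

import posixpath
from collections import namedtuple

IDL = namedtuple('IDL', 'full_path')  # hypothetical stand-in for the real IDL object

idls = [IDL('/src/xpcom/base/nsIFoo.idl'), IDL('/src/dom/nsIBar.idl')]
idl_files = {i.full_path for i in idls}
directories = {posixpath.dirname(i.full_path) for i in idls}
stems = {posixpath.splitext(posixpath.basename(i.full_path))[0] for i in idls}
# stems == {'nsIFoo', 'nsIBar'}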
Example #49
File: data.py Project: isabella232/juggler
 def all_stems(self):
     return [mozpath.splitext(b)[0] for b in self.all_basenames()]
Example #50
File: data.py Project: luke-chang/gecko-1
 def all_stems(self):
     return [mozpath.splitext(b)[0] for b in self.all_basenames()]
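
all_stems simply strips the final extension from each basename. Note that only the last extension is removed; a quick sketch using os.path.splitext, which mozpath delegates to:

import os.path

basenames = ['nsIFoo.idl', 'jar.mn', 'archive.tar.gz']
stems = [os.path.splitext(b)[0] for b in basenames]
# ['nsIFoo', 'jar', 'archive.tar']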
Example #51
    def emit_from_sandbox(self, sandbox):
        """Convert a MozbuildSandbox to tree metadata objects.

        This is a generator of mozbuild.frontend.data.SandboxDerived instances.
        """
        # We always emit a directory traversal descriptor. This is needed by
        # the recursive make backend.
        for o in self._emit_directory_traversal_from_sandbox(sandbox):
            yield o

        for path in sandbox['CONFIGURE_SUBST_FILES']:
            yield self._create_substitution(ConfigFileSubstitution, sandbox,
                path)

        for path in sandbox['CONFIGURE_DEFINE_FILES']:
            yield self._create_substitution(HeaderFileSubstitution, sandbox,
                path)

        # XPIDL source files get processed and turned into .h and .xpt files.
        # If there are multiple XPIDL files in a directory, they get linked
        # together into a final .xpt, which has the name defined by
        # XPIDL_MODULE.
        xpidl_module = sandbox['XPIDL_MODULE']

        if sandbox['XPIDL_SOURCES'] and not xpidl_module:
            raise SandboxValidationError('XPIDL_MODULE must be defined if '
                'XPIDL_SOURCES is defined.', sandbox)

        if xpidl_module and not sandbox['XPIDL_SOURCES']:
            raise SandboxValidationError('XPIDL_MODULE cannot be defined '
                'unless there are XPIDL_SOURCES', sandbox)

        if sandbox['XPIDL_SOURCES'] and sandbox['NO_DIST_INSTALL']:
            self.log(logging.WARN, 'mozbuild_warning', dict(
                path=sandbox.main_path),
                '{path}: NO_DIST_INSTALL has no effect on XPIDL_SOURCES.')

        for idl in sandbox['XPIDL_SOURCES']:
            yield XPIDLFile(sandbox, mozpath.join(sandbox['SRCDIR'], idl),
                xpidl_module)

        for symbol in ('SOURCES', 'HOST_SOURCES', 'UNIFIED_SOURCES'):
            for src in (sandbox[symbol] or []):
                if not os.path.exists(mozpath.join(sandbox['SRCDIR'], src)):
                    raise SandboxValidationError('Reference to a file that '
                        'doesn\'t exist in %s (%s)'
                        % (symbol, src), sandbox)

        # Proxy some variables as-is until we have richer classes to represent
        # them. We should aim to keep this set small because it violates the
        # desired abstraction of the build definition away from makefiles.
        passthru = VariablePassthru(sandbox)
        varlist = [
            'ANDROID_GENERATED_RESFILES',
            'ANDROID_RES_DIRS',
            'DISABLE_STL_WRAPPING',
            'EXTRA_ASSEMBLER_FLAGS',
            'EXTRA_COMPILE_FLAGS',
            'EXTRA_COMPONENTS',
            'EXTRA_DSO_LDOPTS',
            'EXTRA_PP_COMPONENTS',
            'FAIL_ON_WARNINGS',
            'FILES_PER_UNIFIED_FILE',
            'USE_STATIC_LIBS',
            'GENERATED_FILES',
            'IS_GYP_DIR',
            'MSVC_ENABLE_PGO',
            'NO_DIST_INSTALL',
            'PYTHON_UNIT_TESTS',
            'RCFILE',
            'RESFILE',
            'RCINCLUDE',
            'DEFFILE',
            'WIN32_EXE_LDFLAGS',
            'LD_VERSION_SCRIPT',
        ]
        for v in varlist:
            if v in sandbox and sandbox[v]:
                passthru.variables[v] = sandbox[v]

        for v in ['CFLAGS', 'CXXFLAGS', 'CMFLAGS', 'CMMFLAGS', 'LDFLAGS']:
            if v in sandbox and sandbox[v]:
                passthru.variables['MOZBUILD_' + v] = sandbox[v]

        # NO_VISIBILITY_FLAGS is slightly different
        if sandbox['NO_VISIBILITY_FLAGS']:
            passthru.variables['VISIBILITY_FLAGS'] = ''

        if sandbox['DELAYLOAD_DLLS']:
            passthru.variables['DELAYLOAD_LDFLAGS'] = [('-DELAYLOAD:%s' % dll) for dll in sandbox['DELAYLOAD_DLLS']]
            passthru.variables['USE_DELAYIMP'] = True

        varmap = dict(
            SOURCES={
                '.s': 'ASFILES',
                '.asm': 'ASFILES',
                '.c': 'CSRCS',
                '.m': 'CMSRCS',
                '.mm': 'CMMSRCS',
                '.cc': 'CPPSRCS',
                '.cpp': 'CPPSRCS',
                '.cxx': 'CPPSRCS',
                '.S': 'SSRCS',
            },
            HOST_SOURCES={
                '.c': 'HOST_CSRCS',
                '.mm': 'HOST_CMMSRCS',
                '.cc': 'HOST_CPPSRCS',
                '.cpp': 'HOST_CPPSRCS',
                '.cxx': 'HOST_CPPSRCS',
            },
            UNIFIED_SOURCES={
                '.c': 'UNIFIED_CSRCS',
                '.mm': 'UNIFIED_CMMSRCS',
                '.cc': 'UNIFIED_CPPSRCS',
                '.cpp': 'UNIFIED_CPPSRCS',
                '.cxx': 'UNIFIED_CPPSRCS',
            }
        )
        varmap.update(dict(('GENERATED_%s' % k, v) for k, v in varmap.items()
                           if k in ('SOURCES', 'UNIFIED_SOURCES')))
        for variable, mapping in varmap.items():
            for f in sandbox[variable]:
                ext = mozpath.splitext(f)[1]
                if ext not in mapping:
                    raise SandboxValidationError(
                        '%s has an unknown file type.' % f, sandbox)
                l = passthru.variables.setdefault(mapping[ext], [])
                l.append(f)
                if variable.startswith('GENERATED_'):
                    l = passthru.variables.setdefault('GARBAGE', [])
                    l.append(f)

        no_pgo = sandbox.get('NO_PGO')
        sources = sandbox.get('SOURCES', [])
        no_pgo_sources = [f for f in sources if sources[f].no_pgo]
        if no_pgo:
            if no_pgo_sources:
                raise SandboxValidationError('NO_PGO and SOURCES[...].no_pgo '
                    'cannot be set at the same time', sandbox)
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo
        if no_pgo_sources:
            passthru.variables['NO_PROFILE_GUIDED_OPTIMIZE'] = no_pgo_sources

        sources_with_flags = [f for f in sources if sources[f].flags]
        for f in sources_with_flags:
            yield PerSourceFlag(sandbox, f, sources[f].flags)

        exports = sandbox.get('EXPORTS')
        if exports:
            yield Exports(sandbox, exports,
                dist_install=not sandbox.get('NO_DIST_INSTALL', False))

        defines = sandbox.get('DEFINES')
        if defines:
            yield Defines(sandbox, defines)

        resources = sandbox.get('RESOURCE_FILES')
        if resources:
            yield Resources(sandbox, resources, defines)

        for kind, cls in [('PROGRAM', Program), ('HOST_PROGRAM', HostProgram)]:
            program = sandbox.get(kind)
            if program:
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" as %s name, '
                        'because it is already used in %s' % (program, kind,
                        self._binaries[program].relativedir), sandbox)
                self._binaries[program] = cls(sandbox, program)
                self._linkage.append((sandbox, self._binaries[program],
                    kind.replace('PROGRAM', 'USE_LIBS')))

        for kind, cls in [
                ('SIMPLE_PROGRAMS', SimpleProgram),
                ('CPP_UNIT_TESTS', SimpleProgram),
                ('HOST_SIMPLE_PROGRAMS', HostSimpleProgram)]:
            for program in sandbox[kind]:
                if program in self._binaries:
                    raise SandboxValidationError(
                        'Cannot use "%s" in %s, '
                        'because it is already used in %s' % (program, kind,
                        self._binaries[program].relativedir), sandbox)
                self._binaries[program] = cls(sandbox, program,
                    is_unit_test=kind == 'CPP_UNIT_TESTS')
                self._linkage.append((sandbox, self._binaries[program],
                    'HOST_USE_LIBS' if kind == 'HOST_SIMPLE_PROGRAMS'
                    else 'USE_LIBS'))

        extra_js_modules = sandbox.get('EXTRA_JS_MODULES')
        if extra_js_modules:
            yield JavaScriptModules(sandbox, extra_js_modules, 'extra')

        extra_pp_js_modules = sandbox.get('EXTRA_PP_JS_MODULES')
        if extra_pp_js_modules:
            yield JavaScriptModules(sandbox, extra_pp_js_modules, 'extra_pp')

        test_js_modules = sandbox.get('TESTING_JS_MODULES')
        if test_js_modules:
            yield JavaScriptModules(sandbox, test_js_modules, 'testing')

        simple_lists = [
            ('GENERATED_EVENTS_WEBIDL_FILES', GeneratedEventWebIDLFile),
            ('GENERATED_WEBIDL_FILES', GeneratedWebIDLFile),
            ('IPDL_SOURCES', IPDLFile),
            ('LOCAL_INCLUDES', LocalInclude),
            ('GENERATED_INCLUDES', GeneratedInclude),
            ('PREPROCESSED_TEST_WEBIDL_FILES', PreprocessedTestWebIDLFile),
            ('PREPROCESSED_WEBIDL_FILES', PreprocessedWebIDLFile),
            ('TEST_WEBIDL_FILES', TestWebIDLFile),
            ('WEBIDL_FILES', WebIDLFile),
            ('WEBIDL_EXAMPLE_INTERFACES', ExampleWebIDLInterface),
        ]
        for sandbox_var, klass in simple_lists:
            for name in sandbox.get(sandbox_var, []):
                yield klass(sandbox, name)

        if sandbox.get('FINAL_TARGET') or sandbox.get('XPI_NAME') or \
                sandbox.get('DIST_SUBDIR'):
            yield InstallationTarget(sandbox)

        host_libname = sandbox.get('HOST_LIBRARY_NAME')
        libname = sandbox.get('LIBRARY_NAME')

        if host_libname:
            if host_libname == libname:
                raise SandboxValidationError('LIBRARY_NAME and '
                    'HOST_LIBRARY_NAME must have a different value', sandbox)
            lib = HostLibrary(sandbox, host_libname)
            self._libs[host_libname].append(lib)
            self._linkage.append((sandbox, lib, 'HOST_USE_LIBS'))

        final_lib = sandbox.get('FINAL_LIBRARY')
        if not libname and final_lib:
            # If no LIBRARY_NAME is given, create one.
            libname = sandbox['RELATIVEDIR'].replace('/', '_')

        static_lib = sandbox.get('FORCE_STATIC_LIB')
        shared_lib = sandbox.get('FORCE_SHARED_LIB')

        static_name = sandbox.get('STATIC_LIBRARY_NAME')
        shared_name = sandbox.get('SHARED_LIBRARY_NAME')

        is_framework = sandbox.get('IS_FRAMEWORK')
        is_component = sandbox.get('IS_COMPONENT')

        soname = sandbox.get('SONAME')

        shared_args = {}
        static_args = {}

        if final_lib:
            if isinstance(sandbox, MozbuildSandbox):
                if static_lib:
                    raise SandboxValidationError(
                        'FINAL_LIBRARY implies FORCE_STATIC_LIB. '
                        'Please remove the latter.', sandbox)
            if shared_lib:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. '
                    'Please remove one.', sandbox)
            if is_framework:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with IS_FRAMEWORK. '
                    'Please remove one.', sandbox)
            if is_component:
                raise SandboxValidationError(
                    'FINAL_LIBRARY conflicts with IS_COMPONENT. '
                    'Please remove one.', sandbox)
            static_args['link_into'] = final_lib
            static_lib = True

        if libname:
            if is_component:
                if shared_lib:
                    raise SandboxValidationError(
                        'IS_COMPONENT implies FORCE_SHARED_LIB. '
                        'Please remove the latter.', sandbox)
                if is_framework:
                    raise SandboxValidationError(
                        'IS_COMPONENT conflicts with IS_FRAMEWORK. '
                        'Please remove one.', sandbox)
                if static_lib:
                    raise SandboxValidationError(
                        'IS_COMPONENT conflicts with FORCE_STATIC_LIB. '
                        'Please remove one.', sandbox)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.COMPONENT

            if is_framework:
                if shared_lib:
                    raise SandboxValidationError(
                        'IS_FRAMEWORK implies FORCE_SHARED_LIB. '
                        'Please remove the latter.', sandbox)
                if soname:
                    raise SandboxValidationError(
                        'IS_FRAMEWORK conflicts with SONAME. '
                        'Please remove one.', sandbox)
                shared_lib = True
                shared_args['variant'] = SharedLibrary.FRAMEWORK

            if static_name:
                if not static_lib:
                    raise SandboxValidationError(
                        'STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB', sandbox)
                static_args['real_name'] = static_name

            if shared_name:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB', sandbox)
                shared_args['real_name'] = shared_name

            if soname:
                if not shared_lib:
                    raise SandboxValidationError(
                        'SONAME requires FORCE_SHARED_LIB', sandbox)
                shared_args['soname'] = soname

            if not static_lib and not shared_lib:
                static_lib = True

            # If both a shared and a static library are created, only the
            # shared library is meant to be a SDK library.
            if sandbox.get('SDK_LIBRARY'):
                if shared_lib:
                    shared_args['is_sdk'] = True
                elif static_lib:
                    static_args['is_sdk'] = True

            if shared_lib and static_lib:
                if not static_name and not shared_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but neither STATIC_LIBRARY_NAME nor '
                        'SHARED_LIBRARY_NAME is set. At least one is required.',
                        sandbox)
                if static_name and not shared_name and static_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, '
                        'and SHARED_LIBRARY_NAME is unset. Please either '
                        'change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set '
                        'SHARED_LIBRARY_NAME.', sandbox)
                if shared_name and not static_name and shared_name == libname:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, '
                        'and STATIC_LIBRARY_NAME is unset. Please either '
                        'change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set '
                        'STATIC_LIBRARY_NAME.', sandbox)
                if shared_name and static_name and shared_name == static_name:
                    raise SandboxValidationError(
                        'Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, '
                        'but SHARED_LIBRARY_NAME is the same as '
                        'STATIC_LIBRARY_NAME. Please change one of them.',
                        sandbox)

            if shared_lib:
                lib = SharedLibrary(sandbox, libname, **shared_args)
                self._libs[libname].append(lib)
                self._linkage.append((sandbox, lib, 'USE_LIBS'))
            if static_lib:
                lib = StaticLibrary(sandbox, libname, **static_args)
                self._libs[libname].append(lib)
                self._linkage.append((sandbox, lib, 'USE_LIBS'))

        # While there are multiple test manifests, the behavior is very similar
        # across them. We enforce this by having common handling of all
        # manifests and outputting a single class type with the differences
        # described inside the instance.
        #
        # Keys are variable prefixes and values are tuples describing how these
        # manifests should be handled:
        #
        #    (flavor, install_root, install_subdir, active)
        #
        # flavor identifies the flavor of this test.
        # install_root is the path prefix of where to install the files in
        #     the tests directory.
        # install_subdir is the subdirectory under install_root in which the
        #     manifest's files are installed.
        # active indicates whether to filter out inactive tests from the
        #     manifest.
        #
        # We ideally don't filter out inactive tests. However, not every test
        # harness can yet deal with test filtering. Once all harnesses can do
        # this, this feature can be dropped.
        test_manifests = dict(
            A11Y=('a11y', 'testing/mochitest', 'a11y', True),
            BROWSER_CHROME=('browser-chrome', 'testing/mochitest', 'browser', True),
            METRO_CHROME=('metro-chrome', 'testing/mochitest', 'metro', True),
            MOCHITEST=('mochitest', 'testing/mochitest', 'tests', True),
            MOCHITEST_CHROME=('chrome', 'testing/mochitest', 'chrome', True),
            MOCHITEST_WEBAPPRT_CHROME=('webapprt-chrome', 'testing/mochitest', 'webapprtChrome', True),
            WEBRTC_SIGNALLING_TEST=('steeplechase', 'steeplechase', '.', True),
            XPCSHELL_TESTS=('xpcshell', 'xpcshell', '.', False),
        )

        for prefix, info in test_manifests.items():
            for path in sandbox.get('%s_MANIFESTS' % prefix, []):
                for obj in self._process_test_manifest(sandbox, info, path):
                    yield obj

        for flavor in ('crashtest', 'reftest'):
            for path in sandbox.get('%s_MANIFESTS' % flavor.upper(), []):
                for obj in self._process_reftest_manifest(sandbox, flavor, path):
                    yield obj

        jar_manifests = sandbox.get('JAR_MANIFESTS', [])
        if len(jar_manifests) > 1:
            raise SandboxValidationError('While JAR_MANIFESTS is a list, '
                'it is currently limited to one value.', sandbox)

        for path in jar_manifests:
            yield JARManifest(sandbox, mozpath.join(sandbox['SRCDIR'], path))

        # Temporary test to look for jar.mn files that creep in without using
        # the new declaration. Before, we didn't require jar.mn files to be
        # declared anywhere (they were discovered). This will detect people
        # relying on the old behavior.
        if os.path.exists(os.path.join(sandbox['SRCDIR'], 'jar.mn')):
            if 'jar.mn' not in jar_manifests:
                raise SandboxValidationError('A jar.mn exists but it '
                    'is not referenced in the moz.build file. '
                    'Please define JAR_MANIFESTS.', sandbox)

        for name, jar in sandbox.get('JAVA_JAR_TARGETS', {}).items():
            yield SandboxWrapped(sandbox, jar)

        for name, data in sandbox.get('ANDROID_ECLIPSE_PROJECT_TARGETS', {}).items():
            yield SandboxWrapped(sandbox, data)

        if passthru.variables:
            yield passthru
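
The varmap dispatch in the middle of emit_from_sandbox boils down to routing each source file to a make variable by its extension and rejecting unknown extensions. A standalone sketch of that routing (plain dict in place of the passthru object; the suffix table trimmed to a few entries):

import os.path

SUFFIX_TO_VAR = {
    '.c': 'CSRCS',
    '.cc': 'CPPSRCS',
    '.cpp': 'CPPSRCS',
    '.cxx': 'CPPSRCS',
    '.s': 'ASFILES',
    '.S': 'SSRCS',
}

def route_sources(files):
    """Group source files into make variables by extension."""
    variables = {}
    for f in files:
        ext = os.path.splitext(f)[1]
        if ext not in SUFFIX_TO_VAR:
            raise ValueError('%s has an unknown file type.' % f)
        variables.setdefault(SUFFIX_TO_VAR[ext], []).append(f)
    return variables

route_sources(['nsFoo.cpp', 'jit.S', 'sha1.c'])
# {'CPPSRCS': ['nsFoo.cpp'], 'SSRCS': ['jit.S'], 'CSRCS': ['sha1.c']}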
Example #52
def process_gyp_result(
    gyp_result,
    gyp_dir_attrs,
    path,
    config,
    output,
    non_unified_sources,
    action_overrides,
):
    flat_list, targets, data = gyp_result
    no_chromium = gyp_dir_attrs.no_chromium
    no_unified = gyp_dir_attrs.no_unified

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in sorted(
            gyp.common.AllTargets(flat_list, targets,
                                  path.replace("/", os.sep))):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = "%s_%s" % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(
            config,
            mozpath.relpath(mozpath.join(output, reldir, subdir),
                            config.topobjdir),
        )
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]["included_files"]:
            context.add_source(
                mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = "Debug" if config.substs.get("MOZ_DEBUG") else "Release"
        if c not in spec["configurations"]:
            raise RuntimeError("Missing %s gyp configuration for target %s "
                               "in %s" % (c, target_name, build_file))
        target_conf = spec["configurations"][c]

        if "actions" in spec:
            handle_actions(spec["actions"], context, action_overrides)
        if "copies" in spec:
            handle_copies(spec["copies"], context)

        use_libs = []
        libs = []

        def add_deps(s):
            for t in s.get("dependencies", []) + s.get("dependencies_original",
                                                       []):
                ty = targets[t]["type"]
                if ty in ("static_library", "shared_library"):
                    l = targets[t]["target_name"]
                    if l not in use_libs:
                        use_libs.append(l)
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ("static_library", "none"):
                    add_deps(targets[t])
            libs.extend(spec.get("libraries", []))

        # XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        os_libs = []
        for l in libs:
            if l.startswith("-"):
                if l not in os_libs:
                    os_libs.append(l)
            elif l.endswith(".lib"):
                l = l[:-4]
                if l not in os_libs:
                    os_libs.append(l)
            elif l:
                # For library names passed in from moz.build.
                l = os.path.basename(l)
                if l not in use_libs:
                    use_libs.append(l)

        if spec["type"] == "none":
            if not ("actions" in spec or "copies" in spec):
                continue
        elif spec["type"] in ("static_library", "shared_library",
                              "executable"):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = six.ensure_text(spec["target_name"])
            if spec["type"] in ("static_library", "shared_library"):
                if name.startswith("lib"):
                    name = name[3:]
                context["LIBRARY_NAME"] = name
            else:
                context["PROGRAM"] = name
            if spec["type"] == "shared_library":
                context["FORCE_SHARED_LIB"] = True
            elif (spec["type"] == "static_library" and spec.get(
                    "variables", {}).get("no_expand_libs", "0") == "1"):
                # PSM links an NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context["NO_EXPAND_LIBS"] = True
            if use_libs:
                context["USE_LIBS"] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context["OS_LIBS"] = os_libs
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get("sources", []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith("$INTERMEDIATE_DIR/"):
                    s = ObjDirPath(context, f.replace("$INTERMEDIATE_DIR/",
                                                      "!"))
                else:
                    s = SourcePath(context, f)
                if ext == ".h":
                    continue
                if ext == ".def":
                    context["SYMBOLS_FILE"] = s
                elif ext != ".S" and not no_unified and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == ".s":
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context["SOURCES"] = alphabetical_sorted(sources)
            context["UNIFIED_SOURCES"] = alphabetical_sorted(unified_sources)

            defines = target_conf.get("defines", [])
            if config.substs["CC_TYPE"] == "clang-cl" and no_chromium:
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                # Hack: MsvsSettings._TargetConfig tries to compare a str to an int,
                # so convert manually.
                msvs_settings.vs_version.short_name = int(
                    msvs_settings.vs_version.short_name)
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if "=" in define:
                    name, value = define.split("=", 1)
                    context["DEFINES"][name] = value
                else:
                    context["DEFINES"][define] = True

            product_dir_dist = "$PRODUCT_DIR/dist/"
            for include in target_conf.get("include_dirs", []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = "!/dist/include/" + include[len(product_dir_dist
                                                              ):]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = "!/" + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    #
                    # We do allow !- and %-prefixed paths, assuming they come
                    # from moz.build and will be handled the same way as if they
                    # were given to LOCAL_INCLUDES in moz.build.
                    if include.startswith("/"):
                        resolved = mozpath.abspath(
                            mozpath.join(config.topsrcdir, include[1:]))
                    elif not include.startswith(("!", "%")):
                        resolved = mozpath.abspath(
                            mozpath.join(mozpath.dirname(build_file), include))
                    if not include.startswith(
                        ("!", "%")) and not os.path.exists(resolved):
                        continue
                context["LOCAL_INCLUDES"] += [include]

            context["ASFLAGS"] = target_conf.get("asflags_mozilla", [])
            if use_defines_in_asflags and defines:
                context["ASFLAGS"] += ["-D" + d for d in defines]
            if config.substs["OS_TARGET"] == "SunOS":
                context["LDFLAGS"] = target_conf.get("ldflags", [])
            flags = target_conf.get("cflags_mozilla", [])
            if flags:
                suffix_map = {
                    ".c": "CFLAGS",
                    ".cpp": "CXXFLAGS",
                    ".cc": "CXXFLAGS",
                    ".m": "CMFLAGS",
                    ".mm": "CMMFLAGS",
                }
                variables = (suffix_map[e] for e in extensions
                             if e in suffix_map)
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # split() returns a list here, but be defensive in
                        # case a string slips through.
                        if isinstance(f, six.string_types):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Fail on other target types: nothing uses them yet and they're
            # not tested. Support can be added when that becomes necessary.
            raise NotImplementedError("Unsupported gyp target type: %s" %
                                      spec["type"])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context["LOCAL_INCLUDES"] += [
                "!/ipc/ipdl/_ipdlheaders",
                "/ipc/chromium/src",
                "/ipc/glue",
            ]
            # These get set via VC project file settings for normal GYP builds.
            if config.substs["OS_TARGET"] == "WINNT":
                context["DEFINES"]["UNICODE"] = True
                context["DEFINES"]["_UNICODE"] = True
        context["COMPILE_FLAGS"]["OS_INCLUDES"] = []

        for key, value in gyp_dir_attrs.sandbox_vars.items():
            if context.get(key) and isinstance(context[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here, we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context[key] = value + context[key]
            elif context.get(key) and isinstance(context[key], dict):
                context[key].update(value)
            else:
                context[key] = value

        yield context
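
The include_dirs loop in both gyp examples applies the same three rewrite rules before adding to LOCAL_INCLUDES. A condensed sketch of just the rewriting (rewrite_include is a hypothetical helper; posixpath stands in for mozpath; the existence check against topsrcdir is omitted):

import posixpath

PRODUCT_DIR_DIST = '$PRODUCT_DIR/dist/'

def rewrite_include(include, topobjdir):
    # 1. <(PRODUCT_DIR)/dist/ includes map into the objdir's dist/include,
    #    matching how handle_copies installs NSS exports.
    if include.startswith(PRODUCT_DIR_DIST):
        return '!/dist/include/' + include[len(PRODUCT_DIR_DIST):]
    # 2. Paths already under topobjdir become objdir-relative (! prefix),
    #    e.g. NSPR_INCLUDE_DIR in the NSS build.
    if include.startswith(topobjdir):
        return '!/' + posixpath.relpath(include, topobjdir)
    # 3. Everything else is handed to LOCAL_INCLUDES as-is (the real code
    #    first checks that the path exists under topsrcdir).
    return include

rewrite_include('$PRODUCT_DIR/dist/private/nss', '/obj')  # '!/dist/include/private/nss'
rewrite_include('/obj/dist/include/nspr', '/obj')         # '!/dist/include/nspr'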