def test_expand_variables(self):
    """expand_variables substitutes $(name) references from a mapping."""
    cases = [
        ("$(var)", {"var": "value"}, "value"),
        ("$(a) and $(b)", {"a": "1", "b": "2"}, "1 and 2"),
        # A reference with no binding expands to the empty string.
        ("$(a) and $(undefined)", {"a": "1", "b": "2"}, "1 and "),
        # List values are flattened into space-separated words.
        (
            "before $(string) between $(list) after",
            {"string": "abc", "list": ["a", "b", "c"]},
            "before abc between a b c after",
        ),
    ]
    for template, variables, expected in cases:
        self.assertEqual(expand_variables(template, variables), expected)
def test_expand_variables(self):
    """Check $(name) substitution for scalar, missing and list values."""
    # Simple single-variable substitution.
    self.assertEqual(expand_variables("$(var)", {"var": "value"}), "value")
    # Two independent references in one template.
    self.assertEqual(
        expand_variables("$(a) and $(b)", {"a": "1", "b": "2"}), "1 and 2"
    )
    # Unknown names expand to the empty string rather than raising.
    self.assertEqual(
        expand_variables("$(a) and $(undefined)", {"a": "1", "b": "2"}),
        "1 and ",
    )
    # A list value is rendered as its items joined by single spaces.
    self.assertEqual(
        expand_variables(
            "before $(string) between $(list) after",
            {"string": "abc", "list": ["a", "b", "c"]},
        ),
        "before abc between a b c after",
    )
def consume_finished(self):
    """Emit objdir/compile_commands.json from the recorded compile database.

    For each recorded (directory, filename, unified) entry, expands
    make-style $(VAR) references in the compile command using per-directory
    includes/defines plus well-known path variables, appends any per-source
    flags, and writes one {directory, command, file} record per source.
    """
    CommonBackend.consume_finished(self)

    db = []

    for (directory, filename, unified), cmd in self._db.iteritems():
        env = self._envs[directory]
        cmd = list(cmd)
        # Compile the unified wrapper file when there is one; otherwise
        # the source itself.
        if unified is None:
            cmd.append(filename)
        else:
            cmd.append(unified)
        local_extra = list(self._extra_includes[directory])
        if directory not in self._gyp_dirs:
            # Non-gyp directories additionally get the in-tree library
            # flag variables from the configure substitutions.
            for var in (
                'NSPR_CFLAGS',
                'NSS_CFLAGS',
                'MOZ_JPEG_CFLAGS',
                'MOZ_PNG_CFLAGS',
                'MOZ_ZLIB_CFLAGS',
                'MOZ_PIXMAN_CFLAGS',
            ):
                f = env.substs.get(var)
                if f:
                    local_extra.extend(f)
        variables = {
            'LOCAL_INCLUDES': self._includes[directory],
            'DEFINES': self._defines[directory],
            'EXTRA_INCLUDES': local_extra,
            'DIST': mozpath.join(env.topobjdir, 'dist'),
            'DEPTH': env.topobjdir,
            'MOZILLA_DIR': env.topsrcdir,
            'topsrcdir': env.topsrcdir,
            'topobjdir': env.topobjdir,
        }
        variables.update(self._local_flags[directory])
        c = []
        for a in cmd:
            # expand_variables() returns a string, so split() always
            # produces a list; extend unconditionally.  (The previous
            # isinstance(a, types.StringTypes) check tested the result of
            # split() and could never be true — dead branch removed.)
            words = expand_variables(a, variables).split()
            if words:
                c.extend(words)
        per_source_flags = self._per_source_flags.get(filename)
        if per_source_flags is not None:
            c.extend(per_source_flags)
        db.append({
            'directory': directory,
            'command': ' '.join(shell_quote(a) for a in c),
            'file': mozpath.join(directory, filename),
        })

    import json
    # Output the database (a JSON file) to objdir/compile_commands.json
    outputfile = os.path.join(self.environment.topobjdir, 'compile_commands.json')
    with self._write_file(outputfile) as jsonout:
        json.dump(db, jsonout, indent=0)
def consume_finished(self):
    """Emit a clang JSON compilation database from the recorded entries.

    Builds the final command for each (directory, filename, unified)
    record, expands make-style $(VAR) references, re-joins arguments whose
    quoted spaces were broken by split(), adds clangd-friendly flags and
    per-source flags, then dumps the database as JSON to the path given by
    self._outputfile_path().
    """
    CommonBackend.consume_finished(self)

    db = []

    for (directory, filename, unified), cmd in self._db.items():
        env = self._envs[directory]
        # Delegates appending the unified wrapper vs. the source file.
        cmd = self._build_cmd(cmd, filename, unified)
        variables = {
            "DIST": mozpath.join(env.topobjdir, "dist"),
            "DEPTH": env.topobjdir,
            "MOZILLA_DIR": env.topsrcdir,
            "topsrcdir": env.topsrcdir,
            "topobjdir": env.topobjdir,
        }
        variables.update(self._local_flags[directory])
        c = []
        for a in cmd:
            accum = ""
            for word in expand_variables(a, variables).split():
                # We can't just split() the output of expand_variables since
                # there can be spaces enclosed by quotes, e.g. '"foo bar"'.
                # Handle that case by checking whether there are an even
                # number of double-quotes in the word and appending it to
                # the accumulator if not. Meanwhile, shlex.split() and
                # mozbuild.shellutil.split() aren't able to properly handle
                # this and break in various ways, so we can't use something
                # off-the-shelf.
                has_quote = bool(word.count('"') % 2)
                if accum and has_quote:
                    # Closing quote: flush the accumulated argument.
                    c.append(accum + " " + word)
                    accum = ""
                elif accum and not has_quote:
                    # Inside a quoted span: keep accumulating.
                    accum += " " + word
                elif not accum and has_quote:
                    # Opening quote: start accumulating.
                    accum = word
                else:
                    c.append(word)

        # Tell clangd to keep parsing to the end of a file, regardless of
        # how many errors are encountered. (Unified builds mean that we
        # encounter a lot of errors parsing some files.)
        c.insert(-1, "-ferror-limit=0")

        per_source_flags = self._per_source_flags.get(filename)
        if per_source_flags is not None:
            c.extend(per_source_flags)
        db.append(
            {
                "directory": directory,
                "command": " ".join(shell_quote(a) for a in c),
                "file": mozpath.join(directory, filename),
            }
        )

    import json

    outputfile = self._outputfile_path()
    with self._write_file(outputfile) as jsonout:
        json.dump(db, jsonout, indent=0)
def consume_finished(self):
    """Emit objdir/compile_commands.json from the recorded compile database.

    Same expansion scheme as the other backends: per-directory includes,
    defines and extra includes plus well-known path variables are
    substituted into each recorded command before it is shell-quoted.
    """
    CommonBackend.consume_finished(self)

    db = []

    for (directory, filename, unified), cmd in self._db.iteritems():
        env = self._envs[directory]
        cmd = list(cmd)
        # Compile the unified wrapper file when there is one; otherwise
        # the source itself.
        if unified is None:
            cmd.append(filename)
        else:
            cmd.append(unified)
        local_extra = list(self._extra_includes[directory])
        if directory not in self._gyp_dirs:
            # Non-gyp directories additionally get the in-tree library
            # flag variables from the configure substitutions.
            for var in (
                'NSPR_CFLAGS',
                'NSS_CFLAGS',
                'MOZ_JPEG_CFLAGS',
                'MOZ_PNG_CFLAGS',
                'MOZ_ZLIB_CFLAGS',
                'MOZ_PIXMAN_CFLAGS',
            ):
                f = env.substs.get(var)
                if f:
                    local_extra.extend(f)
        variables = {
            'LOCAL_INCLUDES': self._includes[directory],
            'DEFINES': self._defines[directory],
            'EXTRA_INCLUDES': local_extra,
            'DIST': mozpath.join(env.topobjdir, 'dist'),
            'DEPTH': env.topobjdir,
            'MOZILLA_DIR': env.topsrcdir,
            'topsrcdir': env.topsrcdir,
            'topobjdir': env.topobjdir,
        }
        variables.update(self._local_flags[directory])
        c = []
        for a in cmd:
            # expand_variables() returns a string, so split() always
            # produces a list; extend unconditionally.  (The previous
            # isinstance(a, types.StringTypes) check tested the result of
            # split() and could never be true — dead branch removed.)
            words = expand_variables(a, variables).split()
            if words:
                c.extend(words)
        per_source_flags = self._per_source_flags.get(filename)
        if per_source_flags is not None:
            c.extend(per_source_flags)
        db.append({
            'directory': directory,
            'command': ' '.join(shell_quote(a) for a in c),
            # NOTE(review): 'file' is recorded relative here; per the clang
            # compilation-database spec relative files are resolved against
            # 'directory' — confirm that is the intent for this variant.
            'file': filename,
        })

    import json
    # Output the database (a JSON file) to objdir/compile_commands.json
    outputfile = os.path.join(self.environment.topobjdir, 'compile_commands.json')
    with self._write_file(outputfile) as jsonout:
        json.dump(db, jsonout, indent=0)
def consume_finished(self):
    """Emit objdir/compile_commands.json from the recorded compile database.

    Expands make-style $(VAR) references in each recorded command using
    per-directory flags plus well-known path variables, re-joining any
    arguments whose quoted spaces were broken apart by split(), and writes
    one {directory, command, file} record per source.
    """
    CommonBackend.consume_finished(self)

    db = []

    for (directory, filename, unified), cmd in self._db.iteritems():
        env = self._envs[directory]
        cmd = list(cmd)
        # Compile the unified wrapper file when there is one; otherwise
        # the source itself.
        if unified is None:
            cmd.append(filename)
        else:
            cmd.append(unified)
        variables = {
            'DIST': mozpath.join(env.topobjdir, 'dist'),
            'DEPTH': env.topobjdir,
            'MOZILLA_DIR': env.topsrcdir,
            'topsrcdir': env.topsrcdir,
            'topobjdir': env.topobjdir,
        }
        variables.update(self._local_flags[directory])
        c = []
        for a in cmd:
            accum = ''
            for word in expand_variables(a, variables).split():
                # We can't just split() the output of expand_variables since
                # there can be spaces enclosed by quotes, e.g. '"foo bar"'.
                # Handle that case by checking whether there are an even
                # number of double-quotes in the word and appending it to
                # the accumulator if not. Meanwhile, shlex.split() and
                # mozbuild.shellutil.split() aren't able to properly handle
                # this and break in various ways, so we can't use something
                # off-the-shelf.
                has_quote = bool(word.count('"') % 2)
                if accum and has_quote:
                    # Closing quote: flush the accumulated argument.
                    c.append(accum + ' ' + word)
                    accum = ''
                elif accum and not has_quote:
                    # Inside a quoted span: keep accumulating.
                    accum += ' ' + word
                elif not accum and has_quote:
                    # Opening quote: start accumulating.
                    accum = word
                else:
                    c.append(word)
        per_source_flags = self._per_source_flags.get(filename)
        if per_source_flags is not None:
            c.extend(per_source_flags)
        db.append({
            'directory': directory,
            'command': ' '.join(shell_quote(a) for a in c),
            'file': mozpath.join(directory, filename),
        })

    import json
    # Output the database (a JSON file) to objdir/compile_commands.json
    outputfile = os.path.join(self.environment.topobjdir, 'compile_commands.json')
    with self._write_file(outputfile) as jsonout:
        json.dump(db, jsonout, indent=0)
def test_expand_variables(self):
    '''Exercise expand_variables with scalar, missing and list values.'''
    check = self.assertEqual
    check(expand_variables('$(var)', {'var': 'value'}), 'value')
    check(expand_variables('$(a) and $(b)', {'a': '1', 'b': '2'}),
          '1 and 2')
    # A reference with no binding expands to the empty string.
    check(expand_variables('$(a) and $(undefined)', {'a': '1', 'b': '2'}),
          '1 and ')
    # List values are flattened into space-separated words.
    check(expand_variables('before $(string) between $(list) after',
                           {'string': 'abc', 'list': ['a', 'b', 'c']}),
          'before abc between a b c after')
def test_expand_variables(self):
    '''expand_variables replaces $(name) tokens using the given mapping.'''
    for template, mapping, want in (
        ('$(var)', {'var': 'value'}, 'value'),
        ('$(a) and $(b)', {'a': '1', 'b': '2'}, '1 and 2'),
        # Missing names become empty strings.
        ('$(a) and $(undefined)', {'a': '1', 'b': '2'}, '1 and '),
        # Lists are joined with single spaces.
        ('before $(string) between $(list) after',
         {'string': 'abc', 'list': ['a', 'b', 'c']},
         'before abc between a b c after'),
    ):
        self.assertEqual(expand_variables(template, mapping), want)
def consume_finished(self):
    """Emit objdir/compile_commands.json from the recorded compile database.

    Expands make-style $(VAR) references in each recorded command using
    per-directory flags plus well-known path variables, appends any
    per-source flags, and writes one {directory, command, file} record per
    source.
    """
    CommonBackend.consume_finished(self)

    db = []

    for (directory, filename, unified), cmd in self._db.iteritems():
        env = self._envs[directory]
        cmd = list(cmd)
        # Compile the unified wrapper file when there is one; otherwise
        # the source itself.
        if unified is None:
            cmd.append(filename)
        else:
            cmd.append(unified)
        variables = {
            'DIST': mozpath.join(env.topobjdir, 'dist'),
            'DEPTH': env.topobjdir,
            'MOZILLA_DIR': env.topsrcdir,
            'topsrcdir': env.topsrcdir,
            'topobjdir': env.topobjdir,
        }
        variables.update(self._local_flags[directory])
        c = []
        for a in cmd:
            # expand_variables() returns a string, so split() always
            # produces a list; extend unconditionally.  (The previous
            # isinstance(a, types.StringTypes) check tested the result of
            # split() and could never be true — dead branch removed.)
            words = expand_variables(a, variables).split()
            if words:
                c.extend(words)
        per_source_flags = self._per_source_flags.get(filename)
        if per_source_flags is not None:
            c.extend(per_source_flags)
        db.append({
            'directory': directory,
            'command': ' '.join(shell_quote(a) for a in c),
            'file': mozpath.join(directory, filename),
        })

    import json
    # Output the database (a JSON file) to objdir/compile_commands.json
    outputfile = os.path.join(self.environment.topobjdir, 'compile_commands.json')
    with self._write_file(outputfile) as jsonout:
        json.dump(db, jsonout, indent=0)
def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
                       non_unified_sources, action_overrides):
    """Translate a loaded gyp result into GypContext objects.

    gyp_result is the (flat_list, targets, data) tuple produced by loading
    the gyp files; gyp_dir_attrs carries per-directory options
    (no_chromium, no_unified, sandbox_vars); path is the root gyp file;
    config is a ConfigEnvironment; output is the base objdir under which
    each target gets its own subdirectory; non_unified_sources lists
    sources that must not be compiled unified; action_overrides maps gyp
    action names to replacement scripts.  Yields one GypContext per target.
    """
    flat_list, targets, data = gyp_result
    no_chromium = gyp_dir_attrs.no_chromium
    no_unified = gyp_dir_attrs.no_unified

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(
            config,
            mozpath.relpath(mozpath.join(output, reldir, subdir),
                            config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(
                mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs.get('MOZ_DEBUG') else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if 'actions' in spec:
            handle_actions(spec['actions'], context, action_overrides)
        if 'copies' in spec:
            handle_copies(spec['copies'], context)

        use_libs = []
        libs = []

        def add_deps(s):
            # Recursively collect static/shared library dependency names.
            for t in s.get('dependencies', []) + s.get('dependencies_original', []):
                ty = targets[t]['type']
                if ty in ('static_library', 'shared_library'):
                    use_libs.append(targets[t]['target_name'])
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ('static_library', 'none'):
                    add_deps(targets[t])

        libs.extend(spec.get('libraries', []))
        #XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        os_libs = []
        for l in libs:
            if l.startswith('-'):
                os_libs.append(l)
            elif l.endswith('.lib'):
                os_libs.append(l[:-4])
            elif l:
                # For library names passed in from moz.build.
                use_libs.append(os.path.basename(l))

        if spec['type'] == 'none':
            if not ('actions' in spec or 'copies' in spec):
                continue
        elif spec['type'] in ('static_library', 'shared_library', 'executable'):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if spec['type'] in ('static_library', 'shared_library'):
                if name.startswith('lib'):
                    name = name[3:]
                # The context expects an unicode string.
                context['LIBRARY_NAME'] = name.decode('utf-8')
            else:
                context['PROGRAM'] = name.decode('utf-8')
            if spec['type'] == 'shared_library':
                context['FORCE_SHARED_LIB'] = True
            elif spec['type'] == 'static_library' and spec.get(
                    'variables', {}).get('no_expand_libs', '0') == '1':
                # PSM links a NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context['NO_EXPAND_LIBS'] = True
            if use_libs:
                context['USE_LIBS'] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context['OS_LIBS'] = os_libs

            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith('$INTERMEDIATE_DIR/'):
                    s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
                else:
                    s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext == '.def':
                    context['SYMBOLS_FILE'] = s
                elif ext != '.S' and not no_unified and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == '.s':
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            defines = target_conf.get('defines', [])
            if config.substs['CC_TYPE'] == 'clang-cl' and no_chromium:
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if '=' in define:
                    name, value = define.split('=', 1)
                    # The NSS gyp file doesn't expose a way to override this
                    # currently, so we do so here.
                    if name == 'NSS_ALLOW_SSLKEYLOGFILE' and config.substs.get(
                            'RELEASE_OR_BETA', False):
                        continue
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            product_dir_dist = '$PRODUCT_DIR/dist/'
            for include in target_conf.get('include_dirs', []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = '!/dist/include/' + include[len(product_dir_dist):]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = '!/' + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    #
                    # We do allow !- and %-prefixed paths, assuming they come
                    # from moz.build and will be handled the same way as if they
                    # were given to LOCAL_INCLUDES in moz.build.
                    if include.startswith('/'):
                        resolved = mozpath.abspath(
                            mozpath.join(config.topsrcdir, include[1:]))
                    elif not include.startswith(('!', '%')):
                        resolved = mozpath.abspath(
                            mozpath.join(mozpath.dirname(build_file), include))
                    if not include.startswith(
                            ('!', '%')) and not os.path.exists(resolved):
                        continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            if use_defines_in_asflags and defines:
                context['ASFLAGS'] += ['-D' + d for d in defines]
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (suffix_map[e] for e in extensions if e in suffix_map)
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context['LOCAL_INCLUDES'] += [
                '!/ipc/ipdl/_ipdlheaders',
                '/ipc/chromium/src',
                '/ipc/glue',
            ]
            # These get set via VC project file settings for normal GYP builds.
            if config.substs['OS_TARGET'] == 'WINNT':
                context['DEFINES']['UNICODE'] = True
                context['DEFINES']['_UNICODE'] = True

        context['COMPILE_FLAGS']['OS_INCLUDES'] = []

        for key, value in gyp_dir_attrs.sandbox_vars.items():
            if context.get(key) and isinstance(context[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context[key] = value + context[key]
            elif context.get(key) and isinstance(context[key], dict):
                context[key].update(value)
            else:
                context[key] = value

        yield context
def read_from_gyp(config, path, output, vars, no_chromium, no_unified,
                  action_overrides, non_unified_sources=set()):
    """Read a gyp configuration and emits GypContexts for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp configuration
    file, output is the base path under which the objdir for the various gyp
    dependencies will be, and vars a dict of variables to pass to the gyp
    processor.
    """
    is_win = config.substs['OS_TARGET'] == 'WINNT'
    is_msvc = bool(config.substs['_MSC_VER'])
    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())
    if is_msvc:
        # This isn't actually used anywhere in this generator, but it's needed
        # to override the registry detection of VC++ in gyp.
        os.environ['GYP_MSVS_OVERRIDE_PATH'] = 'fake_path'
        os.environ['GYP_MSVS_VERSION'] = config.substs['MSVS_VERSION']

    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
        b'root_targets': None,
    }

    if no_chromium:
        includes = []
        depth = mozpath.dirname(path)
    else:
        depth = chrome_src
        # Files that gyp_chromium always includes
        includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
        finder = FileFinder(chrome_src, find_executables=False)
        includes.extend(
            encode(mozpath.join(chrome_src, name))
            for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
                 default_variables=str_vars,
                 includes=includes,
                 depth=encode(depth),
                 params=params)

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(
            config,
            mozpath.relpath(mozpath.join(output, reldir, subdir),
                            config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(
                mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if 'actions' in spec:
            handle_actions(spec['actions'], context, action_overrides)
        if 'copies' in spec:
            handle_copies(spec['copies'], context)

        use_libs = []
        libs = []

        def add_deps(s):
            # Recursively collect static/shared library dependency names.
            for t in s.get('dependencies', []) + s.get('dependencies_original', []):
                ty = targets[t]['type']
                if ty in ('static_library', 'shared_library'):
                    use_libs.append(targets[t]['target_name'])
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ('static_library', 'none'):
                    add_deps(targets[t])

        libs.extend(spec.get('libraries', []))
        #XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        os_libs = []
        for l in libs:
            if l.startswith('-'):
                os_libs.append(l)
            elif l.endswith('.lib'):
                os_libs.append(l[:-4])
            elif l:
                # For library names passed in from moz.build.
                use_libs.append(os.path.basename(l))

        if spec['type'] == 'none':
            if not ('actions' in spec or 'copies' in spec):
                continue
        elif spec['type'] in ('static_library', 'shared_library', 'executable'):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if spec['type'] in ('static_library', 'shared_library'):
                if name.startswith('lib'):
                    name = name[3:]
                # The context expects an unicode string.
                context['LIBRARY_NAME'] = name.decode('utf-8')
            else:
                context['PROGRAM'] = name.decode('utf-8')
            if spec['type'] == 'shared_library':
                context['FORCE_SHARED_LIB'] = True
            elif spec['type'] == 'static_library' and spec.get(
                    'variables', {}).get('no_expand_libs', '0') == '1':
                # PSM links a NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context['NO_EXPAND_LIBS'] = True
            if use_libs:
                context['USE_LIBS'] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context['OS_LIBS'] = os_libs

            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith('$INTERMEDIATE_DIR/'):
                    s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
                else:
                    s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext == '.def':
                    context['SYMBOLS_FILE'] = s
                elif ext != '.S' and not no_unified and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == '.s':
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            defines = target_conf.get('defines', [])
            if is_msvc and no_chromium:
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            product_dir_dist = '$PRODUCT_DIR/dist/'
            for include in target_conf.get('include_dirs', []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = '!/dist/include/' + include[len(product_dir_dist):]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = '!/' + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    if include.startswith('/'):
                        resolved = mozpath.abspath(
                            mozpath.join(config.topsrcdir, include[1:]))
                    else:
                        resolved = mozpath.abspath(
                            mozpath.join(mozpath.dirname(build_file), include))
                    if not os.path.exists(resolved):
                        continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            if use_defines_in_asflags and defines:
                context['ASFLAGS'] += ['-D' + d for d in defines]
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (suffix_map[e] for e in extensions if e in suffix_map)
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context['LOCAL_INCLUDES'] += [
                '!/ipc/ipdl/_ipdlheaders',
                '/ipc/chromium/src',
                '/ipc/glue',
            ]
            # These get set via VC project file settings for normal GYP builds.
            if is_win:
                context['DEFINES']['UNICODE'] = True
                context['DEFINES']['_UNICODE'] = True

        context['DISABLE_STL_WRAPPING'] = True

        yield context
def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
                      sandbox_vars, mozilla_flags):
    """Translate a JSON gn config into moz.build-writable attributes.

    Returns a dict with 'mozbuild_args' (copied from the gn config) and
    'dirs', mapping each target's relative srcdir to the context attributes
    (SOURCES, DEFINES, LOCAL_INCLUDES, ...) derived for that target.
    Much of this code is based on similar functionality in `gyp_reader.py`.
    """
    mozbuild_attrs = {
        "mozbuild_args": gn_config.get("mozbuild_args", None),
        "dirs": {}
    }

    targets = gn_config["targets"]

    project_relsrcdir = mozpath.relpath(srcdir, config.topsrcdir)

    non_unified_sources = set(
        [mozpath.normpath(s) for s in non_unified_sources])

    def target_info(fullname):
        # NOTE(review): this reads the loop variable `target_fullname` from
        # the enclosing scope rather than the `fullname` parameter it is
        # passed; the two are always the same value at the call site, but
        # confirm before relying on the parameter.
        path, name = target_fullname.split(":")
        # Stripping '//' gives us a path relative to the project root,
        # adding a suffix avoids name collisions with libraries already
        # in the tree (like "webrtc").
        return path.lstrip("//"), name + "_gn"

    # Process all targets from the given gn project and its dependencies.
    for target_fullname, spec in six.iteritems(targets):

        target_path, target_name = target_info(target_fullname)
        context_attrs = {}

        # Remove leading 'lib' from the target_name if any, and use as
        # library name.
        name = target_name
        if spec["type"] in ("static_library", "shared_library", "source_set"):
            if name.startswith("lib"):
                name = name[3:]
            context_attrs["LIBRARY_NAME"] = six.ensure_text(name)
        else:
            raise Exception("The following GN target type is not currently "
                            'consumed by moz.build: "%s". It may need to be '
                            "added, or you may need to re-run the "
                            "`GnConfigGen` step."
                            % spec["type"])

        if spec["type"] == "shared_library":
            context_attrs["FORCE_SHARED_LIB"] = True

        # gn files contain headers and asm sources in sources lists.
        sources = []
        unified_sources = []
        extensions = set()
        use_defines_in_asflags = False
        for f in spec.get("sources", []):
            f = f.lstrip("//")
            ext = mozpath.splitext(f)[-1]
            extensions.add(ext)
            src = "%s/%s" % (project_relsrcdir, f)
            if ext == ".h":
                continue
            elif ext == ".def":
                context_attrs["SYMBOLS_FILE"] = src
            elif ext != ".S" and src not in non_unified_sources:
                unified_sources.append("/%s" % src)
            else:
                sources.append("/%s" % src)
            # The Mozilla build system doesn't use DEFINES for building
            # ASFILES.
            if ext == ".s":
                use_defines_in_asflags = True

        context_attrs["SOURCES"] = sources
        context_attrs["UNIFIED_SOURCES"] = unified_sources

        context_attrs["DEFINES"] = {}
        for define in spec.get("defines", []):
            if "=" in define:
                name, value = define.split("=", 1)
                context_attrs["DEFINES"][name] = value
            else:
                context_attrs["DEFINES"][define] = True

        context_attrs["LOCAL_INCLUDES"] = []
        for include in spec.get("include_dirs", []):
            # GN will have resolved all these paths relative to the root of
            # the project indicated by "//".
            if include.startswith("//"):
                include = include[2:]
            # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
            if include.startswith("/"):
                resolved = mozpath.abspath(
                    mozpath.join(config.topsrcdir, include[1:]))
            else:
                resolved = mozpath.abspath(mozpath.join(srcdir, include))
            if not os.path.exists(resolved):
                # GN files may refer to include dirs that are outside of the
                # tree or we simply didn't vendor. Print a warning in this case.
                if not resolved.endswith("gn-output/gen"):
                    print(
                        "Included path: '%s' does not exist, dropping include from GN "
                        "configuration."
                        % resolved,
                        file=sys.stderr,
                    )
                continue
            if not include.startswith("/"):
                include = "/%s/%s" % (project_relsrcdir, include)
            context_attrs["LOCAL_INCLUDES"] += [include]

        context_attrs["ASFLAGS"] = spec.get("asflags_mozilla", [])
        if use_defines_in_asflags and context_attrs["DEFINES"]:
            context_attrs["ASFLAGS"] += [
                "-D" + d for d in context_attrs["DEFINES"]
            ]
        # Only keep cflags that are explicitly allowed by mozilla_flags.
        flags = [_f for _f in spec.get("cflags", []) if _f in mozilla_flags]
        if flags:
            suffix_map = {
                ".c": "CFLAGS",
                ".cpp": "CXXFLAGS",
                ".cc": "CXXFLAGS",
                ".m": "CMFLAGS",
                ".mm": "CMMFLAGS",
            }
            variables = (suffix_map[e] for e in extensions if e in suffix_map)
            for var in variables:
                for f in flags:
                    # We may be getting make variable references out of the
                    # gn data, and we don't want those in emitted data, so
                    # substitute them with their actual value.
                    f = expand_variables(f, config.substs).split()
                    if not f:
                        continue
                    # the result may be a string or a list.
                    if isinstance(f, six.string_types):
                        context_attrs.setdefault(var, []).append(f)
                    else:
                        context_attrs.setdefault(var, []).extend(f)

        context_attrs["OS_LIBS"] = []
        for lib in spec.get("libs", []):
            lib_name = os.path.splitext(lib)[0]
            if lib.endswith(".framework"):
                context_attrs["OS_LIBS"] += ["-framework " + lib_name]
            else:
                context_attrs["OS_LIBS"] += [lib_name]

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context_attrs["LOCAL_INCLUDES"] += [
            "!/ipc/ipdl/_ipdlheaders",
            "/ipc/chromium/src",
            "/ipc/glue",
            "/tools/profiler/public",
        ]

        # These get set via VC project file settings for normal GYP builds.
        # TODO: Determine if these defines are needed for GN builds.
        if gn_config["mozbuild_args"]["OS_TARGET"] == "WINNT":
            context_attrs["DEFINES"]["UNICODE"] = True
            context_attrs["DEFINES"]["_UNICODE"] = True

        context_attrs["COMPILE_FLAGS"] = {"OS_INCLUDES": []}

        for key, value in sandbox_vars.items():
            if context_attrs.get(key) and isinstance(context_attrs[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context_attrs[key] = value + context_attrs[key]
            elif context_attrs.get(key) and isinstance(context_attrs[key], dict):
                context_attrs[key].update(value)
            else:
                context_attrs[key] = value

        target_relsrcdir = mozpath.join(project_relsrcdir, target_path,
                                        target_name)
        mozbuild_attrs["dirs"][target_relsrcdir] = context_attrs

    return mozbuild_attrs
def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
    """Read a gyp configuration and emit GypContexts for the backend to
    process.

    :param config: a ConfigEnvironment (provides substs, topsrcdir, topobjdir).
    :param path: path to a root gyp configuration file.
    :param output: base path under which the objdir for the various gyp
        dependencies will be placed.
    :param vars: dict of variables to pass to the gyp processor.
    :param non_unified_sources: collection of source paths that must not be
        built unified.  NOTE(review): the mutable default is safe here because
        it is only used for membership tests, never mutated.
    :yields: one GypContext per processed gyp target.
    """
    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())

    # Keys are byte strings because gyp (Python 2) expects plain str.
    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
    }

    # Files that gyp_chromium always includes
    includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
    finder = FileFinder(chrome_src, find_executables=False)
    includes.extend(
        encode(mozpath.join(chrome_src, name))
        for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
                 default_variables=str_vars,
                 includes=includes,
                 depth=encode(mozpath.dirname(path)),
                 params=params)

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(config, mozpath.relpath(
            mozpath.join(output, reldir, subdir), config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(mozpath.abspath(
                mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if spec['type'] == 'none':
            # Nothing to build; don't emit a context for this target.
            continue
        elif spec['type'] == 'static_library':
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if name.startswith('lib'):
                name = name[3:]
            # The context expects an unicode string.
            context['LIBRARY_NAME'] = name.decode('utf-8')
            # gyp files contain headers and asm sources in sources lists.
            # Partition sources: headers are dropped, .S assembly and any
            # source listed in non_unified_sources stay out of unified builds.
            sources = []
            unified_sources = []
            extensions = set()
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext != '.S' and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            # gyp defines come as 'NAME' or 'NAME=value' strings.
            for define in target_conf.get('defines', []):
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            for include in target_conf.get('include_dirs', []):
                # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                #
                # NB: gyp files sometimes have actual absolute paths (e.g.
                # /usr/include32) and sometimes paths that moz.build considers
                # absolute, i.e. starting from topsrcdir. There's no good way
                # to tell them apart here, and the actual absolute paths are
                # likely bogus. In any event, actual absolute paths will be
                # filtered out by trying to find them in topsrcdir.
                if include.startswith('/'):
                    resolved = mozpath.abspath(
                        mozpath.join(config.topsrcdir, include[1:]))
                else:
                    resolved = mozpath.abspath(
                        mozpath.join(mozpath.dirname(build_file), include))
                if not os.path.exists(resolved):
                    continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                # Map source extensions seen in this target to the moz.build
                # per-language flag variables that should receive the cflags.
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (suffix_map[e] for e in extensions if e in suffix_map)
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs)
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types than static_library because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context['LOCAL_INCLUDES'] += [
            '!/ipc/ipdl/_ipdlheaders',
            '/ipc/chromium/src',
            '/ipc/glue',
        ]
        # These get set via VC project file settings for normal GYP builds.
        if config.substs['OS_TARGET'] == 'WINNT':
            context['DEFINES']['UNICODE'] = True
            context['DEFINES']['_UNICODE'] = True

        context['DISABLE_STL_WRAPPING'] = True

        yield context
def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
                       non_unified_sources, action_overrides):
    """Turn the result of a gyp.Load() call into GypContexts.

    :param gyp_result: (flat_list, targets, data) tuple as returned by gyp.
    :param gyp_dir_attrs: attributes from the GYP_DIRS moz.build entry
        (provides no_chromium, no_unified and sandbox_vars).
    :param path: path to the root gyp configuration file.
    :param config: a ConfigEnvironment.
    :param output: base path under which per-target objdirs are placed.
    :param non_unified_sources: sources that must not be built unified.
    :param action_overrides: passed through to handle_actions.
    :yields: one GypContext per processed gyp target.
    """
    flat_list, targets, data = gyp_result
    no_chromium = gyp_dir_attrs.no_chromium
    no_unified = gyp_dir_attrs.no_unified

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(config, mozpath.relpath(
            mozpath.join(output, reldir, subdir), config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(mozpath.abspath(mozpath.join(
                mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if 'actions' in spec:
            handle_actions(spec['actions'], context, action_overrides)
        if 'copies' in spec:
            handle_copies(spec['copies'], context)

        use_libs = []
        libs = []

        def add_deps(s):
            # Collect in-tree library dependencies into use_libs.
            for t in s.get('dependencies', []) + s.get('dependencies_original', []):
                ty = targets[t]['type']
                if ty in ('static_library', 'shared_library'):
                    use_libs.append(targets[t]['target_name'])
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ('static_library', 'none'):
                    add_deps(targets[t])

        libs.extend(spec.get('libraries', []))
        # XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        os_libs = []
        for l in libs:
            if l.startswith('-'):
                # Raw linker flags go to OS_LIBS untouched.
                os_libs.append(l)
            elif l.endswith('.lib'):
                # Windows import libraries: strip the extension.
                os_libs.append(l[:-4])
            elif l:
                # For library names passed in from moz.build.
                use_libs.append(os.path.basename(l))

        if spec['type'] == 'none':
            # Emit a context only if the 'none' target had actions or copies.
            if not ('actions' in spec or 'copies' in spec):
                continue
        elif spec['type'] in ('static_library', 'shared_library', 'executable'):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if spec['type'] in ('static_library', 'shared_library'):
                if name.startswith('lib'):
                    name = name[3:]
                # The context expects an unicode string.
                context['LIBRARY_NAME'] = name.decode('utf-8')
            else:
                context['PROGRAM'] = name.decode('utf-8')
            if spec['type'] == 'shared_library':
                context['FORCE_SHARED_LIB'] = True
            elif spec['type'] == 'static_library' and \
                    spec.get('variables', {}).get('no_expand_libs', '0') == '1':
                # PSM links a NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context['NO_EXPAND_LIBS'] = True
            if use_libs:
                context['USE_LIBS'] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context['OS_LIBS'] = os_libs
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith('$INTERMEDIATE_DIR/'):
                    # Generated sources live in the objdir ('!' prefix).
                    s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!'))
                else:
                    s = SourcePath(context, f)
                if ext == '.h':
                    continue
                if ext == '.def':
                    context['SYMBOLS_FILE'] = s
                elif ext != '.S' and not no_unified and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == '.s':
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            defines = target_conf.get('defines', [])
            if bool(config.substs['_MSC_VER']) and no_chromium:
                # When building with MSVC outside of chromium, pull in the
                # defines MSVS project settings would have provided.
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            product_dir_dist = '$PRODUCT_DIR/dist/'
            for include in target_conf.get('include_dirs', []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = '!/dist/include/' + include[len(product_dir_dist):]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = '!/' + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    if include.startswith('/'):
                        resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:]))
                    else:
                        resolved = mozpath.abspath(mozpath.join(mozpath.dirname(build_file), include))
                    if not os.path.exists(resolved):
                        continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            if use_defines_in_asflags and defines:
                context['ASFLAGS'] += ['-D' + d for d in defines]
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                # Map source extensions seen in this target to the moz.build
                # per-language flag variables that should receive the cflags.
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (
                    suffix_map[e]
                    for e in extensions if e in suffix_map
                )
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context['LOCAL_INCLUDES'] += [
                '!/ipc/ipdl/_ipdlheaders',
                '/ipc/chromium/src',
                '/ipc/glue',
            ]
            # These get set via VC project file settings for normal GYP builds.
            if config.substs['OS_TARGET'] == 'WINNT':
                context['DEFINES']['UNICODE'] = True
                context['DEFINES']['_UNICODE'] = True

        context['DISABLE_STL_WRAPPING'] = True

        for key, value in gyp_dir_attrs.sandbox_vars.items():
            if context.get(key) and isinstance(context[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context[key] = value + context[key]
            else:
                context[key] = value

        yield context
def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
                      sandbox_vars, mozilla_flags):
    """Translate a json gn config into moz.build attributes.

    Much of this code is based on similar functionality in `gyp_reader.py`.

    :param gn_config: parsed JSON GN configuration (must contain 'targets'
        and 'mozbuild_args').
    :param srcdir: the source directory the GN project lives in.
    :param config: a ConfigEnvironment (provides substs and topsrcdir).
    :param output: unused here; kept for interface parity with the gyp reader.
    :param non_unified_sources: sources that must not be built unified.
    :param sandbox_vars: extra moz.build variables to merge into each target.
    :param mozilla_flags: whitelist of cflags allowed through from GN data.
    :returns: dict with 'mozbuild_args' and a 'dirs' mapping of relative
        source dir -> context attribute dict, suitable for writing moz.build
        files.
    """
    mozbuild_attrs = {'mozbuild_args': gn_config.get('mozbuild_args', None),
                      'dirs': {}}

    targets = gn_config["targets"]

    project_relsrcdir = mozpath.relpath(srcdir, config.topsrcdir)

    def target_info(fullname):
        # Split a GN label ('//path/to/dir:name') into directory and name.
        # Fix: use the `fullname` parameter; the original closed over the
        # loop variable `target_fullname`, leaving the parameter dead.
        path, name = fullname.split(':')
        # Stripping '//' gives us a path relative to the project root,
        # adding a suffix avoids name collisions with libraries already
        # in the tree (like "webrtc").
        return path.lstrip('//'), name + '_gn'

    # Process all targets from the given gn project and its dependencies.
    for target_fullname, spec in targets.iteritems():

        target_path, target_name = target_info(target_fullname)
        context_attrs = {}

        # Remove leading 'lib' from the target_name if any, and use as
        # library name.
        name = target_name
        if spec['type'] in ('static_library', 'shared_library', 'source_set'):
            if name.startswith('lib'):
                name = name[3:]
            # The context expects an unicode string.
            context_attrs['LIBRARY_NAME'] = name.decode('utf-8')
        else:
            raise Exception('The following GN target type is not currently '
                            'consumed by moz.build: "%s". It may need to be '
                            'added, or you may need to re-run the '
                            '`GnConfigGen` step.' % spec['type'])

        if spec['type'] == 'shared_library':
            context_attrs['FORCE_SHARED_LIB'] = True

        # gn files contain headers and asm sources in sources lists.
        sources = []
        unified_sources = []
        extensions = set()
        use_defines_in_asflags = False
        for f in spec.get('sources', []):
            f = f.lstrip("//")
            ext = mozpath.splitext(f)[-1]
            extensions.add(ext)
            src = '%s/%s' % (project_relsrcdir, f)
            if ext == '.h':
                continue
            elif ext == '.def':
                context_attrs['SYMBOLS_FILE'] = src
            elif ext != '.S' and src not in non_unified_sources:
                unified_sources.append('/%s' % src)
            else:
                sources.append('/%s' % src)
            # The Mozilla build system doesn't use DEFINES for building
            # ASFILES.
            if ext == '.s':
                use_defines_in_asflags = True

        context_attrs['SOURCES'] = sources
        context_attrs['UNIFIED_SOURCES'] = unified_sources

        # GN defines come as 'NAME' or 'NAME=value' strings.
        context_attrs['DEFINES'] = {}
        for define in spec.get('defines', []):
            if '=' in define:
                name, value = define.split('=', 1)
                context_attrs['DEFINES'][name] = value
            else:
                context_attrs['DEFINES'][define] = True

        context_attrs['LOCAL_INCLUDES'] = []
        for include in spec.get('include_dirs', []):
            # GN will have resolved all these paths relative to the root of
            # the project indicated by "//".
            if include.startswith('//'):
                include = include[2:]
            # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
            if include.startswith('/'):
                resolved = mozpath.abspath(
                    mozpath.join(config.topsrcdir, include[1:]))
            else:
                resolved = mozpath.abspath(mozpath.join(srcdir, include))
            if not os.path.exists(resolved):
                # GN files may refer to include dirs that are outside of the
                # tree or we simply didn't vendor. Print a warning in this case.
                if not resolved.endswith('gn-output/gen'):
                    print("Included path: '%s' does not exist, dropping include from GN "
                          "configuration." % resolved, file=sys.stderr)
                continue
            if not include.startswith('/'):
                include = '/%s/%s' % (project_relsrcdir, include)
            context_attrs['LOCAL_INCLUDES'] += [include]

        context_attrs['ASFLAGS'] = spec.get('asflags_mozilla', [])
        # Fix: the original tested `defines`, a name never bound in this
        # function (NameError for any target with .s sources); the intended
        # source of defines here is the DEFINES dict built above.
        if use_defines_in_asflags and context_attrs['DEFINES']:
            context_attrs['ASFLAGS'] += [
                '-D' + d for d in context_attrs['DEFINES']]
        # Only let whitelisted flags through from the GN data.
        flags = [f for f in spec.get('cflags', []) if f in mozilla_flags]
        if flags:
            suffix_map = {
                '.c': 'CFLAGS',
                '.cpp': 'CXXFLAGS',
                '.cc': 'CXXFLAGS',
                '.m': 'CMFLAGS',
                '.mm': 'CMMFLAGS',
            }
            variables = (suffix_map[e] for e in extensions if e in suffix_map)
            for var in variables:
                for f in flags:
                    # We may be getting make variable references out of the
                    # gn data, and we don't want those in emitted data, so
                    # substitute them with their actual value.
                    f = expand_variables(f, config.substs).split()
                    if not f:
                        continue
                    # the result may be a string or a list.
                    if isinstance(f, types.StringTypes):
                        context_attrs.setdefault(var, []).append(f)
                    else:
                        context_attrs.setdefault(var, []).extend(f)

        context_attrs['OS_LIBS'] = []
        for lib in spec.get('libs', []):
            lib_name = os.path.splitext(lib)[0]
            if lib.endswith('.framework'):
                context_attrs['OS_LIBS'] += ['-framework ' + lib_name]
            else:
                context_attrs['OS_LIBS'] += [lib_name]

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context_attrs['LOCAL_INCLUDES'] += [
            '!/ipc/ipdl/_ipdlheaders',
            '/ipc/chromium/src',
            '/ipc/glue',
        ]

        # These get set via VC project file settings for normal GYP builds.
        # TODO: Determine if these defines are needed for GN builds.
        if gn_config['mozbuild_args']['OS_TARGET'] == 'WINNT':
            context_attrs['DEFINES']['UNICODE'] = True
            context_attrs['DEFINES']['_UNICODE'] = True

        context_attrs['COMPILE_FLAGS'] = {
            'STL': [],
            'OS_INCLUDES': [],
        }

        for key, value in sandbox_vars.items():
            if context_attrs.get(key) and isinstance(context_attrs[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context_attrs[key] = value + context_attrs[key]
            elif context_attrs.get(key) and isinstance(context_attrs[key], dict):
                context_attrs[key].update(value)
            else:
                context_attrs[key] = value

        target_relsrcdir = mozpath.join(project_relsrcdir, target_path,
                                        target_name)
        mozbuild_attrs['dirs'][target_relsrcdir] = context_attrs

    return mozbuild_attrs
def process_gyp_result(
    gyp_result,
    gyp_dir_attrs,
    path,
    config,
    output,
    non_unified_sources,
    action_overrides,
):
    """Turn the result of a gyp.Load() call into GypContexts.

    :param gyp_result: (flat_list, targets, data) tuple as returned by gyp.
    :param gyp_dir_attrs: attributes from the GYP_DIRS moz.build entry
        (provides no_chromium, no_unified and sandbox_vars).
    :param path: path to the root gyp configuration file.
    :param config: a ConfigEnvironment.
    :param output: base path under which per-target objdirs are placed.
    :param non_unified_sources: sources that must not be built unified.
    :param action_overrides: passed through to handle_actions.
    :yields: one GypContext per processed gyp target.
    """
    flat_list, targets, data = gyp_result
    no_chromium = gyp_dir_attrs.no_chromium
    no_unified = gyp_dir_attrs.no_unified

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    # Sorted for deterministic output ordering across runs.
    for target in sorted(
            gyp.common.AllTargets(flat_list, targets, path.replace("/", os.sep))):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = "%s_%s" % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(
            config,
            mozpath.relpath(mozpath.join(output, reldir, subdir),
                            config.topobjdir),
        )
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]["included_files"]:
            context.add_source(
                mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = "Debug" if config.substs.get("MOZ_DEBUG") else "Release"
        if c not in spec["configurations"]:
            raise RuntimeError("Missing %s gyp configuration for target %s "
                               "in %s" % (c, target_name, build_file))
        target_conf = spec["configurations"][c]

        if "actions" in spec:
            handle_actions(spec["actions"], context, action_overrides)
        if "copies" in spec:
            handle_copies(spec["copies"], context)

        use_libs = []
        libs = []

        def add_deps(s):
            # Collect in-tree library dependencies into use_libs (deduped).
            for t in s.get("dependencies", []) + s.get("dependencies_original", []):
                ty = targets[t]["type"]
                if ty in ("static_library", "shared_library"):
                    l = targets[t]["target_name"]
                    if l not in use_libs:
                        use_libs.append(l)
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ("static_library", "none"):
                    add_deps(targets[t])

        libs.extend(spec.get("libraries", []))
        # XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        os_libs = []
        for l in libs:
            if l.startswith("-"):
                # Raw linker flags go to OS_LIBS untouched.
                if l not in os_libs:
                    os_libs.append(l)
            elif l.endswith(".lib"):
                # Windows import libraries: strip the extension.
                l = l[:-4]
                if l not in os_libs:
                    os_libs.append(l)
            elif l:
                # For library names passed in from moz.build.
                l = os.path.basename(l)
                if l not in use_libs:
                    use_libs.append(l)

        if spec["type"] == "none":
            # Emit a context only if the 'none' target had actions or copies.
            if not ("actions" in spec or "copies" in spec):
                continue
        elif spec["type"] in ("static_library", "shared_library", "executable"):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = six.ensure_text(spec["target_name"])
            if spec["type"] in ("static_library", "shared_library"):
                if name.startswith("lib"):
                    name = name[3:]
                context["LIBRARY_NAME"] = name
            else:
                context["PROGRAM"] = name
            if spec["type"] == "shared_library":
                context["FORCE_SHARED_LIB"] = True
            elif (spec["type"] == "static_library" and spec.get(
                    "variables", {}).get("no_expand_libs", "0") == "1"):
                # PSM links a NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context["NO_EXPAND_LIBS"] = True
            if use_libs:
                context["USE_LIBS"] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context["OS_LIBS"] = os_libs
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get("sources", []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith("$INTERMEDIATE_DIR/"):
                    # Generated sources live in the objdir ('!' prefix).
                    s = ObjDirPath(context, f.replace("$INTERMEDIATE_DIR/", "!"))
                else:
                    s = SourcePath(context, f)
                if ext == ".h":
                    continue
                if ext == ".def":
                    context["SYMBOLS_FILE"] = s
                elif ext != ".S" and not no_unified and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == ".s":
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context["SOURCES"] = alphabetical_sorted(sources)
            context["UNIFIED_SOURCES"] = alphabetical_sorted(unified_sources)

            defines = target_conf.get("defines", [])
            if config.substs["CC_TYPE"] == "clang-cl" and no_chromium:
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                # Hack: MsvsSettings._TargetConfig tries to compare a str to an int,
                # so convert manually.
                msvs_settings.vs_version.short_name = int(
                    msvs_settings.vs_version.short_name)
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if "=" in define:
                    name, value = define.split("=", 1)
                    context["DEFINES"][name] = value
                else:
                    context["DEFINES"][define] = True

            product_dir_dist = "$PRODUCT_DIR/dist/"
            for include in target_conf.get("include_dirs", []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = "!/dist/include/" + include[len(product_dir_dist):]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = "!/" + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    #
                    # We do allow !- and %-prefixed paths, assuming they come
                    # from moz.build and will be handled the same way as if they
                    # were given to LOCAL_INCLUDES in moz.build.
                    if include.startswith("/"):
                        resolved = mozpath.abspath(
                            mozpath.join(config.topsrcdir, include[1:]))
                    elif not include.startswith(("!", "%")):
                        resolved = mozpath.abspath(
                            mozpath.join(mozpath.dirname(build_file), include))
                    if not include.startswith(
                            ("!", "%")) and not os.path.exists(resolved):
                        continue
                context["LOCAL_INCLUDES"] += [include]

            context["ASFLAGS"] = target_conf.get("asflags_mozilla", [])
            if use_defines_in_asflags and defines:
                context["ASFLAGS"] += ["-D" + d for d in defines]
            if config.substs["OS_TARGET"] == "SunOS":
                context["LDFLAGS"] = target_conf.get("ldflags", [])
            flags = target_conf.get("cflags_mozilla", [])
            if flags:
                # Map source extensions seen in this target to the moz.build
                # per-language flag variables that should receive the cflags.
                suffix_map = {
                    ".c": "CFLAGS",
                    ".cpp": "CXXFLAGS",
                    ".cc": "CXXFLAGS",
                    ".m": "CMFLAGS",
                    ".mm": "CMMFLAGS",
                }
                variables = (suffix_map[e] for e in extensions if e in suffix_map)
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, six.string_types):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError("Unsupported gyp target type: %s" %
                                      spec["type"])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context["LOCAL_INCLUDES"] += [
                "!/ipc/ipdl/_ipdlheaders",
                "/ipc/chromium/src",
                "/ipc/glue",
            ]
            # These get set via VC project file settings for normal GYP builds.
            if config.substs["OS_TARGET"] == "WINNT":
                context["DEFINES"]["UNICODE"] = True
                context["DEFINES"]["_UNICODE"] = True

        context["COMPILE_FLAGS"]["OS_INCLUDES"] = []

        for key, value in gyp_dir_attrs.sandbox_vars.items():
            if context.get(key) and isinstance(context[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context[key] = value + context[key]
            elif context.get(key) and isinstance(context[key], dict):
                context[key].update(value)
            else:
                context[key] = value

        yield context
def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
                      sandbox_vars, mozilla_flags):
    """Translate a json gn config into moz.build attributes.

    Much of this code is based on similar functionality in `gyp_reader.py`.

    :param gn_config: parsed JSON GN configuration (must contain 'targets'
        and 'mozbuild_args').
    :param srcdir: the source directory the GN project lives in.
    :param config: a ConfigEnvironment (provides substs and topsrcdir).
    :param output: unused here; kept for interface parity with the gyp reader.
    :param non_unified_sources: sources that must not be built unified.
    :param sandbox_vars: extra moz.build variables to merge into each target.
    :param mozilla_flags: whitelist of cflags allowed through from GN data.
    :returns: dict with 'mozbuild_args' and a 'dirs' mapping of relative
        source dir -> context attribute dict, suitable for writing moz.build
        files.
    """
    mozbuild_attrs = {
        'mozbuild_args': gn_config.get('mozbuild_args', None),
        'dirs': {}
    }

    targets = gn_config["targets"]

    project_relsrcdir = mozpath.relpath(srcdir, config.topsrcdir)

    def target_info(fullname):
        # Split a GN label ('//path/to/dir:name') into directory and name.
        # Fix: use the `fullname` parameter; the original closed over the
        # loop variable `target_fullname`, leaving the parameter dead.
        path, name = fullname.split(':')
        # Stripping '//' gives us a path relative to the project root,
        # adding a suffix avoids name collisions with libraries already
        # in the tree (like "webrtc").
        return path.lstrip('//'), name + '_gn'

    # Process all targets from the given gn project and its dependencies.
    for target_fullname, spec in targets.iteritems():

        target_path, target_name = target_info(target_fullname)
        context_attrs = {}

        # Remove leading 'lib' from the target_name if any, and use as
        # library name.
        name = target_name
        if spec['type'] in ('static_library', 'shared_library', 'source_set'):
            if name.startswith('lib'):
                name = name[3:]
            # The context expects an unicode string.
            context_attrs['LIBRARY_NAME'] = name.decode('utf-8')
        else:
            raise Exception('The following GN target type is not currently '
                            'consumed by moz.build: "%s". It may need to be '
                            'added, or you may need to re-run the '
                            '`GnConfigGen` step.' % spec['type'])

        if spec['type'] == 'shared_library':
            context_attrs['FORCE_SHARED_LIB'] = True

        # gn files contain headers and asm sources in sources lists.
        sources = []
        unified_sources = []
        extensions = set()
        use_defines_in_asflags = False
        for f in spec.get('sources', []):
            f = f.lstrip("//")
            ext = mozpath.splitext(f)[-1]
            extensions.add(ext)
            src = '%s/%s' % (project_relsrcdir, f)
            if ext == '.h':
                continue
            elif ext == '.def':
                context_attrs['SYMBOLS_FILE'] = src
            elif ext != '.S' and src not in non_unified_sources:
                unified_sources.append('/%s' % src)
            else:
                sources.append('/%s' % src)
            # The Mozilla build system doesn't use DEFINES for building
            # ASFILES.
            if ext == '.s':
                use_defines_in_asflags = True

        context_attrs['SOURCES'] = sources
        context_attrs['UNIFIED_SOURCES'] = unified_sources

        # GN defines come as 'NAME' or 'NAME=value' strings.
        context_attrs['DEFINES'] = {}
        for define in spec.get('defines', []):
            if '=' in define:
                name, value = define.split('=', 1)
                context_attrs['DEFINES'][name] = value
            else:
                context_attrs['DEFINES'][define] = True

        context_attrs['LOCAL_INCLUDES'] = []
        for include in spec.get('include_dirs', []):
            # GN will have resolved all these paths relative to the root of
            # the project indicated by "//".
            if include.startswith('//'):
                include = include[2:]
            # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
            if include.startswith('/'):
                resolved = mozpath.abspath(
                    mozpath.join(config.topsrcdir, include[1:]))
            else:
                resolved = mozpath.abspath(mozpath.join(srcdir, include))
            if not os.path.exists(resolved):
                # GN files may refer to include dirs that are outside of the
                # tree or we simply didn't vendor. Print a warning in this case.
                if not resolved.endswith('gn-output/gen'):
                    print(
                        "Included path: '%s' does not exist, dropping include from GN "
                        "configuration." % resolved, file=sys.stderr)
                continue
            if not include.startswith('/'):
                include = '/%s/%s' % (project_relsrcdir, include)
            context_attrs['LOCAL_INCLUDES'] += [include]

        context_attrs['ASFLAGS'] = spec.get('asflags_mozilla', [])
        # Fix: the original tested `defines`, a name never bound in this
        # function (NameError for any target with .s sources); the intended
        # source of defines here is the DEFINES dict built above.
        if use_defines_in_asflags and context_attrs['DEFINES']:
            context_attrs['ASFLAGS'] += [
                '-D' + d for d in context_attrs['DEFINES']]
        # Only let whitelisted flags through from the GN data.
        flags = [f for f in spec.get('cflags', []) if f in mozilla_flags]
        if flags:
            suffix_map = {
                '.c': 'CFLAGS',
                '.cpp': 'CXXFLAGS',
                '.cc': 'CXXFLAGS',
                '.m': 'CMFLAGS',
                '.mm': 'CMMFLAGS',
            }
            variables = (suffix_map[e] for e in extensions if e in suffix_map)
            for var in variables:
                for f in flags:
                    # We may be getting make variable references out of the
                    # gn data, and we don't want those in emitted data, so
                    # substitute them with their actual value.
                    f = expand_variables(f, config.substs).split()
                    if not f:
                        continue
                    # the result may be a string or a list.
                    if isinstance(f, types.StringTypes):
                        context_attrs.setdefault(var, []).append(f)
                    else:
                        context_attrs.setdefault(var, []).extend(f)

        context_attrs['OS_LIBS'] = []
        for lib in spec.get('libs', []):
            lib_name = os.path.splitext(lib)[0]
            if lib.endswith('.framework'):
                context_attrs['OS_LIBS'] += ['-framework ' + lib_name]
            else:
                context_attrs['OS_LIBS'] += [lib_name]

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context_attrs['LOCAL_INCLUDES'] += [
            '!/ipc/ipdl/_ipdlheaders',
            '/ipc/chromium/src',
            '/ipc/glue',
        ]

        # These get set via VC project file settings for normal GYP builds.
        # TODO: Determine if these defines are needed for GN builds.
        if gn_config['mozbuild_args']['OS_TARGET'] == 'WINNT':
            context_attrs['DEFINES']['UNICODE'] = True
            context_attrs['DEFINES']['_UNICODE'] = True

        context_attrs['COMPILE_FLAGS'] = {
            'STL': [],
            'OS_INCLUDES': [],
        }

        for key, value in sandbox_vars.items():
            if context_attrs.get(key) and isinstance(context_attrs[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context_attrs[key] = value + context_attrs[key]
            elif context_attrs.get(key) and isinstance(context_attrs[key], dict):
                context_attrs[key].update(value)
            else:
                context_attrs[key] = value

        target_relsrcdir = mozpath.join(project_relsrcdir, target_path,
                                        target_name)
        mozbuild_attrs['dirs'][target_relsrcdir] = context_attrs

    return mozbuild_attrs
def read_from_gyp(config, path, output, vars, non_unified_sources=None):
    """Read a gyp configuration and emit GypContexts for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp configuration
    file, output is the base path under which the objdir for the various gyp
    dependencies will be, and vars a dict of variables to pass to the gyp
    processor. non_unified_sources is an optional set of source paths that
    must be kept out of unified compilation.

    Yields a GypContext per processed gyp target. Raises RuntimeError when the
    expected Debug/Release configuration is missing for a target, and
    NotImplementedError for target types other than 'none'/'static_library'.
    """
    # Use a None sentinel instead of a mutable default argument; the set is
    # only read here, so substituting a fresh empty set is equivalent.
    if non_unified_sources is None:
        non_unified_sources = set()

    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())

    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
    }

    # Files that gyp_chromium always includes
    includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
    finder = FileFinder(chrome_src, find_executables=False)
    includes.extend(encode(mozpath.join(chrome_src, name))
                    for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
                 default_variables=str_vars,
                 includes=includes,
                 depth=encode(mozpath.dirname(path)),
                 params=params)

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = \
            gyp.common.ParseQualifiedTarget(target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(config, mozpath.relpath(
            mozpath.join(output, reldir, subdir), config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(mozpath.abspath(mozpath.join(
                mozpath.dirname(build_file), f)))

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if spec['type'] == 'none':
            continue
        elif spec['type'] == 'static_library':
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if name.startswith('lib'):
                name = name[3:]
            # The context expects an unicode string.
            context['LIBRARY_NAME'] = name.decode('utf-8')
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            for f in spec.get('sources', []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                s = SourcePath(context, f)
                # Headers are never compiled directly; skip them.
                if ext == '.h':
                    continue
                # Assembly files and explicitly excluded sources cannot be
                # part of a unified compilation.
                if ext != '.S' and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)

            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            for define in target_conf.get('defines', []):
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            for include in target_conf.get('include_dirs', []):
                # moz.build expects all LOCAL_INCLUDES to exist, so ensure
                # they do.
                #
                # NB: gyp files sometimes have actual absolute paths (e.g.
                # /usr/include32) and sometimes paths that moz.build considers
                # absolute, i.e. starting from topsrcdir. There's no good way
                # to tell them apart here, and the actual absolute paths are
                # likely bogus. In any event, actual absolute paths will be
                # filtered out by trying to find them in topsrcdir.
                if include.startswith('/'):
                    resolved = mozpath.abspath(mozpath.join(
                        config.topsrcdir, include[1:]))
                else:
                    resolved = mozpath.abspath(mozpath.join(
                        mozpath.dirname(build_file), include))
                if not os.path.exists(resolved):
                    continue
                context['LOCAL_INCLUDES'] += [include]

            context['ASFLAGS'] = target_conf.get('asflags_mozilla', [])
            flags = target_conf.get('cflags_mozilla', [])
            if flags:
                suffix_map = {
                    '.c': 'CFLAGS',
                    '.cpp': 'CXXFLAGS',
                    '.cc': 'CXXFLAGS',
                    '.m': 'CMFLAGS',
                    '.mm': 'CMMFLAGS',
                }
                variables = (
                    suffix_map[e]
                    for e in extensions
                    if e in suffix_map
                )
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value. Split the
                        # expansion so a variable standing for several flags
                        # contributes individual entries, matching how the GN
                        # reader and the CompileDB backend treat the same data.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, types.StringTypes):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types than static_library because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context['LOCAL_INCLUDES'] += [
            '!/ipc/ipdl/_ipdlheaders',
            '/ipc/chromium/src',
            '/ipc/glue',
        ]

        # These get set via VC project file settings for normal GYP builds.
        if config.substs['OS_TARGET'] == 'WINNT':
            context['DEFINES']['UNICODE'] = True
            context['DEFINES']['_UNICODE'] = True

        context['DISABLE_STL_WRAPPING'] = True

        yield context