def create_chromium_fetch_task(config, name, fetch):
    """Describe a Chromium fetch task: command, artifact name and cache digest.

    ``fetch`` supplies the fetch script, target platform and optional
    revision; ``config`` and ``name`` are accepted for interface parity with
    the other task creators but are not consulted here.
    """
    workdir = '/builds/worker'
    platform = fetch.get('platform')
    revision = fetch.get('revision')
    artifact_name = fetch.get('artifact-name')

    # Build the script arguments, quoting values since they are interpolated
    # into a `bash -c` string.
    arg_parts = ['--platform', shell_quote(platform)]
    if revision:
        arg_parts.extend(['--revision', shell_quote(revision)])
    args = ' '.join(arg_parts)

    command = [
        'bash',
        '-c',
        'cd {} && /usr/bin/python3 {} {}'.format(workdir, fetch['script'], args),
    ]

    # Any change to these values must invalidate the cached task.
    digest = [
        'revision={}'.format(revision),
        'platform={}'.format(platform),
        'artifact_name={}'.format(artifact_name),
    ]

    return {
        'command': command,
        'artifact_name': artifact_name,
        'digest_data': digest,
    }
def gen_sources_rules(self, extra_inputs):
    """Emit one compile rule per source file, for target and host compilers."""
    env_substs = self.environment.substs
    dash_c_for_as = self.variables.get('AS_DASH_C_FLAG',
                                       env_substs['AS_DASH_C_FLAG'])
    # (source set, compiler var, flags var, "-c"-style flag, object prefix)
    table = [
        (self.sources['.S'], 'AS', 'SFLAGS', '-c', ''),
        (self.sources['.s'], 'AS', 'ASFLAGS', dash_c_for_as, ''),
        (self.sources['.cpp'], 'CXX', 'CXXFLAGS', '-c', ''),
        (self.sources['.c'], 'CC', 'CFLAGS', '-c', ''),
        (self.host_sources['.cpp'], 'HOST_CXX', 'HOST_CXXFLAGS', '-c', 'host_'),
        (self.host_sources['.c'], 'HOST_CC', 'HOST_CFLAGS', '-c', 'host_'),
    ]
    for file_set, compiler_var, flags_var, dash_c, obj_prefix in table:
        for source in sorted(file_set):
            # AS can be set to $(CC), so we need to call expand_variables on
            # the compiler to get the real value.
            raw_compiler = self.variables.get(compiler_var,
                                              env_substs[compiler_var])
            command = [expand_variables(raw_compiler, env_substs)]
            command += [shell_quote(f) for f in self.local_flags[flags_var]]
            command += [shell_quote(f) for f in self.per_source_flags[source]]
            command += [dash_c, '%f', '-o', '%o']
            self.rule(
                cmd=command,
                inputs=[source],
                extra_inputs=extra_inputs,
                outputs=[obj_prefix + '%B.o'],
                display='%s %%f' % compiler_var,
            )
def create_chromium_fetch_task(config, name, fetch):
    """Build the command/digest description for a Chromium snapshot fetch.

    ``fetch`` supplies the fetch script, platform and optional revision.
    ``config`` and ``name`` are unused here but kept for interface parity
    with the other task creators.
    """
    artifact_name = fetch.get("artifact-name")

    workdir = "/builds/worker"

    platform = fetch.get("platform")
    revision = fetch.get("revision")

    # Quote user-supplied values: they are interpolated into a bash -c string.
    args = "--platform " + shell_quote(platform)
    if revision:
        args += " --revision " + shell_quote(revision)

    cmd = [
        "bash",
        "-c",
        "cd {} && " "/usr/bin/python3 {} {}".format(workdir, fetch["script"], args),
    ]

    return {
        "command": cmd,
        "artifact_name": artifact_name,
        # Any change to these values invalidates the cached task.
        "digest_data": [
            "revision={}".format(revision),
            "platform={}".format(platform),
            "artifact_name={}".format(artifact_name),
        ],
    }
def gen_sources_rules(self, extra_inputs):
    """Emit a compile rule for each target and host source file."""
    sources = self.sources
    host_sources = self.host_sources
    # .s assembly uses the toolchain-specific spelling of the "-c" flag.
    as_dash_c = self.variables.get(
        'AS_DASH_C_FLAG', self.environment.substs['AS_DASH_C_FLAG'])
    # Each entry: (source files, compiler var, flags var, -c flag, obj prefix).
    compilers = [
        (sources['.S'], 'AS', 'SFLAGS', '-c', ''),
        (sources['.s'], 'AS', 'ASFLAGS', as_dash_c, ''),
        (sources['.cpp'], 'CXX', 'CXXFLAGS', '-c', ''),
        (sources['.c'], 'CC', 'CFLAGS', '-c', ''),
        (host_sources['.cpp'], 'HOST_CXX', 'HOST_CXXFLAGS', '-c', 'host_'),
        (host_sources['.c'], 'HOST_CC', 'HOST_CFLAGS', '-c', 'host_'),
    ]
    for srcs, compiler, flags, dash_c, prefix in compilers:
        for src in sorted(srcs):
            # AS can be set to $(CC), so we need to call expand_variables on
            # the compiler to get the real value.
            compiler_value = self.variables.get(
                compiler, self.environment.substs[compiler])
            cmd = [
                expand_variables(compiler_value, self.environment.substs)
            ]
            cmd.extend(shell_quote(f) for f in self.local_flags[flags])
            cmd.extend(shell_quote(f) for f in self.per_source_flags[src])
            cmd.extend([dash_c, '%f', '-o', '%o'])
            self.rule(
                cmd=cmd,
                inputs=[src],
                extra_inputs=extra_inputs,
                outputs=[prefix + '%B.o'],
                display='%s %%f' % compiler,
            )
def gen_sources_rules(self, extra_inputs):
    """Emit a compile rule for each .S/.cpp/.c target source file."""
    # (extension, compiler variable, flags variable)
    compilers = [
        ('.S', 'AS', 'ASFLAGS'),
        ('.cpp', 'CXX', 'CXXFLAGS'),
        ('.c', 'CC', 'CFLAGS'),
    ]
    for extension, compiler, flags in compilers:
        srcs = sorted(self.sources[extension])
        for src in srcs:
            # AS can be set to $(CC), so we need to call expand_variables on
            # the compiler to get the real value.
            cmd = [
                expand_variables(self.environment.substs[compiler],
                                 self.environment.substs)
            ]
            cmd.extend(shell_quote(f) for f in self.local_flags[flags])
            cmd.extend(shell_quote(f) for f in self.per_source_flags[src])
            cmd.extend(['-c', '%f', '-o', '%o'])
            self.rule(
                cmd=cmd,
                inputs=[src],
                extra_inputs=extra_inputs,
                outputs=['%B.o'],
                display='%s %%f' % compiler,
            )
def create_chromium_fetch_task(config, job):
    """Create a full task description that fetches a Chromium build.

    The task runs the job's fetch script inside the worker, uploads the
    result under public/, and (unless taskgraph.fast) is cache-indexed on
    revision, platform and artifact name.
    """
    name = job['name']
    fetch = job['fetch']
    artifact_name = fetch.get('artifact-name')

    workdir = '/builds/worker'

    platform = fetch.get('platform')
    revision = fetch.get('revision')

    # Values are shell-quoted because they end up inside a bash -c string.
    args = '--platform ' + shell_quote(platform)
    if revision:
        args += ' --revision ' + shell_quote(revision)

    cmd = [
        'bash',
        '-c',
        'cd {} && ' '/usr/bin/python3 {} {}'.format(
            workdir, fetch['script'], args
        )
    ]

    env = {
        'UPLOAD_DIR': '/builds/worker/artifacts'
    }

    task = make_base_task(config, name, job['description'], cmd)
    task['treeherder']['symbol'] = join_symbol('Fetch-URL', name)
    task['worker']['artifacts'] = [{
        'type': 'directory',
        'name': 'public',
        'path': '/builds/worker/artifacts',
    }]
    task['worker']['env'] = env
    task['attributes']['fetch-artifact'] = 'public/%s' % artifact_name

    if not taskgraph.fast:
        cache_name = task['label'].replace('{}-'.format(config.kind), '', 1)

        # This adds the level to the index path automatically.
        add_optimization(
            config,
            task,
            cache_type=CACHE_TYPE,
            cache_name=cache_name,
            # Any change to these values invalidates the cached task.
            digest_data=[
                "revision={}".format(revision),
                "platform={}".format(platform),
                "artifact_name={}".format(artifact_name),
            ],
        )

    return task
def consume_finished(self):
    """Finish the build and write objdir/compile_commands.json.

    Each recorded (directory, filename, unified) entry is expanded into a
    full compiler invocation and emitted as one compilation-database record.
    """
    CommonBackend.consume_finished(self)

    db = []

    for (directory, filename, unified), cmd in self._db.iteritems():
        env = self._envs[directory]
        cmd = list(cmd)
        # Compile the unified wrapper when there is one, otherwise the
        # source file itself.
        if unified is None:
            cmd.append(filename)
        else:
            cmd.append(unified)
        local_extra = list(self._extra_includes[directory])
        if directory not in self._gyp_dirs:
            # Non-gyp directories implicitly pick up the in-tree library
            # flags when the corresponding substs are set.
            for var in (
                'NSPR_CFLAGS',
                'NSS_CFLAGS',
                'MOZ_JPEG_CFLAGS',
                'MOZ_PNG_CFLAGS',
                'MOZ_ZLIB_CFLAGS',
                'MOZ_PIXMAN_CFLAGS',
            ):
                f = env.substs.get(var)
                if f:
                    local_extra.extend(f)
        variables = {
            'LOCAL_INCLUDES': self._includes[directory],
            'DEFINES': self._defines[directory],
            'EXTRA_INCLUDES': local_extra,
            'DIST': mozpath.join(env.topobjdir, 'dist'),
            'DEPTH': env.topobjdir,
            'MOZILLA_DIR': env.topsrcdir,
            'topsrcdir': env.topsrcdir,
            'topobjdir': env.topobjdir,
        }
        variables.update(self._local_flags[directory])
        c = []
        for a in cmd:
            # expand_variables() returns a string, so split() always yields
            # a list here; the former `isinstance(a, types.StringTypes)`
            # branch could never be taken and has been removed.
            words = expand_variables(a, variables).split()
            if words:
                c.extend(words)
        per_source_flags = self._per_source_flags.get(filename)
        if per_source_flags is not None:
            c.extend(per_source_flags)
        db.append({
            'directory': directory,
            'command': ' '.join(shell_quote(a) for a in c),
            'file': mozpath.join(directory, filename),
        })

    import json
    # Output the database (a JSON file) to objdir/compile_commands.json
    outputfile = os.path.join(self.environment.topobjdir, 'compile_commands.json')
    with self._write_file(outputfile) as jsonout:
        json.dump(db, jsonout, indent=0)
def _build_db_line(self, objdir, cenv, filename, canonical_suffix, flags, ishost):
    """Append a compilation-database entry for *filename* to self._db.

    Sources with a non-C-family canonical suffix are silently ignored.
    """
    # Host compilation uses the HOST_-prefixed toolchain variables.
    prefix = 'HOST_' if ishost else ''
    # Map each C-family suffix to (compiler var, base flags key, extra
    # Objective-C flags key or None).
    recipes = {
        '.c': ('CC', 'COMPILE_CFLAGS', None),
        '.m': ('CC', 'COMPILE_CFLAGS', 'COMPILE_CMFLAGS'),
        '.cpp': ('CXX', 'COMPILE_CXXFLAGS', None),
        '.mm': ('CXX', 'COMPILE_CXXFLAGS', 'COMPILE_CMMFLAGS'),
    }
    recipe = recipes.get(canonical_suffix)
    if recipe is None:
        return
    compiler_var, base_var, objc_var = recipe
    compiler = cenv.substs[prefix + compiler_var]
    cflags = list(flags[base_var])
    if objc_var is not None:
        cflags.extend(flags[objc_var])
    # '-o /dev/null -c': the command records flags, it is never executed to
    # produce a real object file.
    cmd = compiler.split() + ['-o', '/dev/null', '-c'] + cflags + [filename]
    self._db.append({
        'directory': objdir,
        'command': ' '.join(shell_quote(a) for a in cmd),
        'file': filename
    })
def compileflags(command_context, what):
    """Print the shell-quoted compiler flags used to build the file *what*.

    Returns 1 when the objdir or the make target cannot be resolved.
    """
    from mozbuild.util import resolve_target_to_make
    from mozbuild.compilation import util

    if not util.check_top_objdir(command_context.topobjdir):
        return 1

    path_arg = command_context._wrap_path_argument(what)
    make_dir, make_target = resolve_target_to_make(
        command_context.topobjdir, path_arg.relpath())
    if make_dir is None and make_target is None:
        return 1

    build_vars = util.get_build_vars(make_dir, command_context)

    # C sources use CC/COMPILE_CFLAGS; everything else is treated as C++.
    if what.endswith(".c"):
        cc, name = "CC", "COMPILE_CFLAGS"
    else:
        cc, name = "CXX", "COMPILE_CXXFLAGS"

    if name not in build_vars:
        return

    cc_words = shell_split(build_vars[cc])
    flag_words = shell_split(build_vars[name])
    # Drop the first word: it is the compiler executable itself.
    flags = (cc_words + flag_words)[1:]
    sanitized = util.sanitize_cflags(flags)
    print(" ".join(shell_quote(arg) for arg in sanitized))
def generic_worker_toolchain(config, job, taskdesc):
    """Configure a generic-worker toolchain task.

    Sets up worker artifacts/env, moves toolchain attributes from ``run``
    onto the task, wires up caching, and builds the run-task command.
    """
    run = job["run"]

    worker = taskdesc["worker"] = job["worker"]
    worker["chain-of-trust"] = True

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault("artifacts", [])
    if not artifacts:
        generic_worker_add_artifacts(config, job, taskdesc)

    if job["worker"]["os"] == "windows":
        # There were no caches on generic-worker before bug 1519472, and they cause
        # all sorts of problems with Windows toolchain tasks, disable them until
        # tasks are ready.
        run["use-caches"] = False

    env = worker.setdefault("env", {})
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
    })

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run["script"].endswith(".py"):
        raise NotImplementedError(
            "Python toolchain scripts aren't supported on generic-worker")

    attributes = taskdesc.setdefault("attributes", {})
    attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
    if "toolchain-alias" in run:
        attributes["toolchain-alias"] = run.pop("toolchain-alias")

    digest_data = get_digest_data(config, run, taskdesc)

    # Cache unless the job explicitly opts out or taskgraph.fast is set.
    if job.get("attributes", {}).get("cached_task") is not False and not taskgraph.fast:
        name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)
        taskdesc["cache"] = {
            "type": CACHE_TYPE,
            "name": name,
            "digest-data": digest_data,
        }

    run["using"] = "run-task"

    args = run.pop("arguments", "")
    if args:
        args = " " + shell_quote(*args)

    # Windows expands env vars as %GECKO_PATH%; POSIX shells as $GECKO_PATH.
    if job["worker"]["os"] == "windows":
        gecko_path = "%GECKO_PATH%"
    else:
        gecko_path = "$GECKO_PATH"

    run["command"] = "{}/taskcluster/scripts/misc/{}{}".format(
        gecko_path, run.pop("script"), args)

    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def compileflags(self, what):
    """Print the sanitized compiler flags used to build the file *what*.

    Returns 1 when the objdir or the make target cannot be resolved.
    """
    from mozbuild.util import resolve_target_to_make
    from mozbuild.compilation import util

    if not util.check_top_objdir(self.topobjdir):
        return 1

    path_arg = self._wrap_path_argument(what)
    make_dir, make_target = resolve_target_to_make(self.topobjdir,
                                                   path_arg.relpath())
    if make_dir is None and make_target is None:
        return 1

    build_vars = util.get_build_vars(make_dir, self)

    # C sources use CC/CFLAGS; everything else is treated as C++.
    if what.endswith(".c"):
        cc = "CC"
        name = "COMPILE_CFLAGS"
    else:
        cc = "CXX"
        name = "COMPILE_CXXFLAGS"

    if name not in build_vars:
        return

    # Drop the first flag since that is the pathname of the compiler.
    flags = (shell_split(build_vars[cc]) + shell_split(build_vars[name]))[1:]

    print(" ".join(shell_quote(arg) for arg in util.sanitize_cflags(flags)))
def windows_toolchain(config, job, taskdesc):
    """Configure a Windows toolchain build task.

    Builds an explicit two-step command list: a robustcheckout of the gecko
    tree followed by the toolchain build script run under msys bash.
    """
    run = job['run']

    worker = taskdesc['worker']

    worker['artifacts'] = [{
        'path': r'public\build',
        'type': 'directory',
    }]
    worker['chain-of-trust'] = True

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    # Assemble the Mercurial robustcheckout invocation; GECKO_* placeholders
    # are expanded by the worker's environment at run time.
    hg = r'c:\Program Files\Mercurial\hg.exe'
    hg_command = ['"{}"'.format(hg)]
    hg_command.append('robustcheckout')
    hg_command.extend(['--sharebase', 'y:\\hg-shared'])
    hg_command.append('--purge')
    hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
    hg_command.extend(['--revision', '%GECKO_HEAD_REV%'])
    hg_command.append('%GECKO_HEAD_REPOSITORY%')
    hg_command.append('.\\build\\src')

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        raise NotImplementedError("Python scripts don't work on Windows")

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    bash = r'c:\mozilla-build\msys\bin\bash'
    worker['command'] = [
        ' '.join(hg_command),
        # do something intelligent.
        r'{} build/src/taskcluster/scripts/misc/{}{}'.format(
            bash, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    # Index-cache the task on a digest of its inputs unless in fast mode.
    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config,
            taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
def consume_finished(self):
    """Flush delayed rules, write Tuprules.tup, and run `tup init` if needed."""
    CommonBackend.consume_finished(self)

    # Generated files are processed last so all rule data is available.
    for objdir, backend_file in sorted(self._backend_files.items()):
        for obj in backend_file.delayed_generated_files:
            self._process_generated_file(backend_file, obj)
        with self._write_file(fh=backend_file):
            pass

    with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh:
        # Global (non-directory-specific) defines become ACDEFINES.
        acdefines = [name for name in self.environment.defines
                     if not name in self.environment.non_global_defines]
        acdefines_flags = ' '.join(['-D%s=%s' % (name,
                                                 shell_quote(self.environment.defines[name]))
                                    for name in sorted(acdefines)])
        # TODO: AB_CD only exists in Makefiles at the moment.
        acdefines_flags += ' -DAB_CD=en-US'

        fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n')
        fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n')
        fh.write('ACDEFINES = %s\n' % acdefines_flags)
        fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % (
            os.path.relpath(self.environment.topsrcdir, self.environment.topobjdir)
        ))
        fh.write('PYTHON = $(MOZ_OBJ_ROOT)/_virtualenv/bin/python -B\n')
        fh.write('PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py\n')
        fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n')
        fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n')
        fh.write('IDL_PARSER_CACHE_DIR = $(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n')

    # Run 'tup init' if necessary.
    if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")):
        tup = self.environment.substs.get('TUP', 'tup')
        self._cmd.run_process(cwd=self.environment.topsrcdir, log_name='tup',
                              args=[tup, 'init'])
def _process_generated_file(self, backend_file, obj):
    """Emit a tup rule that runs a GENERATED_FILES python script."""
    # TODO: These are directories that don't work in the tup backend
    # yet, because things they depend on aren't built yet.
    skip_directories = (
        'layout/style/test',  # HostSimplePrograms
        'toolkit/library',  # libxul.so
    )
    if obj.script and obj.method and obj.relobjdir not in skip_directories:
        backend_file.export_shell()
        cmd = self._py_action('file_generate')
        if obj.localized:
            cmd.append('--locale=en-US')
        cmd.extend([
            obj.script,
            obj.method,
            obj.outputs[0],
            '%s.pp' % obj.outputs[0],  # deps file required
        ])
        full_inputs = [f.full_path for f in obj.inputs]
        cmd.extend(full_inputs)
        cmd.extend(shell_quote(f) for f in obj.flags)

        outputs = []
        outputs.extend(obj.outputs)
        # The deps .pp file is produced alongside the declared outputs.
        outputs.append('%s.pp' % obj.outputs[0])

        # Outputs needed for compilation also feed the installed-files group.
        extra_outputs = [self._installed_files] if obj.required_for_compile else None

        backend_file.rule(
            display='python {script}:{method} -> [%o]'.format(script=obj.script, method=obj.method),
            cmd=cmd,
            inputs=full_inputs,
            outputs=outputs,
            extra_outputs=extra_outputs,
        )
def _process_generated_file(self, backend_file, obj):
    """Emit a tup rule that runs a GENERATED_FILES python script."""
    # TODO: These are directories that don't work in the tup backend
    # yet, because things they depend on aren't built yet.
    skip_directories = (
        'layout/style/test',  # HostSimplePrograms
        'toolkit/library',  # libxul.so
    )
    if obj.script and obj.method and obj.relobjdir not in skip_directories:
        backend_file.export_shell()
        cmd = self._py_action('file_generate')
        cmd.extend([
            obj.script,
            obj.method,
            obj.outputs[0],
            '%s.pp' % obj.outputs[0],  # deps file required
        ])
        full_inputs = [f.full_path for f in obj.inputs]
        cmd.extend(full_inputs)
        cmd.extend(shell_quote(f) for f in obj.flags)

        outputs = []
        outputs.extend(obj.outputs)
        # The deps .pp file is produced alongside the declared outputs.
        outputs.append('%s.pp' % obj.outputs[0])

        backend_file.rule(
            display='python {script}:{method} -> [%o]'.format(
                script=obj.script, method=obj.method),
            cmd=cmd,
            inputs=full_inputs,
            outputs=outputs,
        )
def compileflags(self, what):
    """Print the compile flags that apply to the source file *what*.

    Returns 1 when the objdir or the make target cannot be resolved.
    """
    from mozbuild.util import resolve_target_to_make
    from mozbuild.compilation import util

    if not util.check_top_objdir(self.topobjdir):
        return 1

    path_arg = self._wrap_path_argument(what)
    make_dir, make_target = resolve_target_to_make(self.topobjdir,
                                                   path_arg.relpath())
    if make_dir is None and make_target is None:
        return 1

    build_vars = util.get_build_vars(make_dir, self)

    # Pick the flags variable based on the source language.
    name = 'COMPILE_CFLAGS' if what.endswith('.c') else 'COMPILE_CXXFLAGS'
    if name not in build_vars:
        return

    resolved = util.get_flags(self.topobjdir, make_dir, build_vars, name)
    print(' '.join(shell_quote(arg) for arg in resolved))
def compileflags(self, what):
    """Print the shell-quoted compile flags for the source file *what*.

    Returns 1 when the objdir or the make target cannot be resolved.
    """
    from mozbuild.util import resolve_target_to_make
    from mozbuild.compilation import util

    if not util.check_top_objdir(self.topobjdir):
        return 1

    path_arg = self._wrap_path_argument(what)
    make_dir, make_target = resolve_target_to_make(self.topobjdir,
                                                   path_arg.relpath())
    if make_dir is None and make_target is None:
        return 1

    build_vars = util.get_build_vars(make_dir, self)

    # C sources use CFLAGS; anything else is treated as C++.
    name = 'COMPILE_CFLAGS' if what.endswith('.c') else 'COMPILE_CXXFLAGS'
    if name not in build_vars:
        return

    words = shell_split(build_vars[name])
    print(' '.join(shell_quote(arg) for arg in words))
def compileflags(self, what):
    """Print the sanitized compiler flags used to build the file *what*.

    Returns 1 when the objdir or the make target cannot be resolved.
    """
    from mozbuild.util import resolve_target_to_make
    from mozbuild.compilation import util

    if not util.check_top_objdir(self.topobjdir):
        return 1

    path_arg = self._wrap_path_argument(what)
    make_dir, make_target = resolve_target_to_make(self.topobjdir,
                                                   path_arg.relpath())
    if make_dir is None and make_target is None:
        return 1

    build_vars = util.get_build_vars(make_dir, self)

    # C sources use CC/CFLAGS; everything else is treated as C++.
    if what.endswith('.c'):
        cc = 'CC'
        name = 'COMPILE_CFLAGS'
    else:
        cc = 'CXX'
        name = 'COMPILE_CXXFLAGS'

    if name not in build_vars:
        return

    # Drop the first flag since that is the pathname of the compiler.
    flags = (shell_split(build_vars[cc]) + shell_split(build_vars[name]))[1:]

    print(' '.join(shell_quote(arg) for arg in util.sanitize_cflags(flags)))
def test_check_prog_with_args(self):
    """Exercise check_prog() when FOO is preset on the command line.

    Covers: a preset name found in the candidate list, a preset absolute
    path, a preset path that does not resolve, a value containing a space,
    and a missing program with allow_missing=True.
    """
    # Preset by name: resolved to the known-a path.
    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=known-a"]
    )
    self.assertEqual(status, 0)
    self.assertEqual(config, {"FOO": self.KNOWN_A})
    self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A)

    # Preset by full path: accepted as-is.
    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "known-b", "known c"))',
        ["FOO=%s" % self.KNOWN_A],
    )
    self.assertEqual(status, 0)
    self.assertEqual(config, {"FOO": self.KNOWN_A})
    self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A)

    # Preset path that does not exist: the check fails with an error.
    path = self.KNOWN_B.replace("known-b", "known-a")
    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=%s" % path]
    )
    self.assertEqual(status, 1)
    self.assertEqual(config, {})
    self.assertEqual(
        out,
        textwrap.dedent(
            """\
            checking for foo... not found
            DEBUG: foo: Looking for %s
            ERROR: Cannot find foo
            """
        )
        % path,
    )

    # Preset value containing a space: reported shell-quoted.
    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown",))', ["FOO=known c"]
    )
    self.assertEqual(status, 0)
    self.assertEqual(config, {"FOO": fake_short_path(self.KNOWN_C)})
    self.assertEqual(
        out, "checking for foo... %s\n" % shell_quote(fake_short_path(self.KNOWN_C))
    )

    # allow_missing only applies to auto-detection, not explicit presets.
    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), '
        "allow_missing=True)",
        ["FOO=unknown"],
    )
    self.assertEqual(status, 1)
    self.assertEqual(config, {})
    self.assertEqual(
        out,
        textwrap.dedent(
            """\
            checking for foo... not found
            DEBUG: foo: Looking for unknown
            ERROR: Cannot find foo
            """
        ),
    )
def _build_db_line(self, objdir, cenv, filename, canonical_suffix, flags, ishost):
    """Append a compilation-database entry for *filename* to self._db.

    Sources with a non-C-family canonical suffix are silently ignored.
    """
    # Distinguish between host and target files.
    prefix = 'HOST_' if ishost else ''
    if canonical_suffix == '.c':
        compiler = cenv.substs[prefix + 'CC']
        cflags = list(flags['COMPILE_CFLAGS'])
        # Add the Objective-C flags if needed.
        if filename.endswith('.m'):
            cflags.extend(flags['COMPILE_CMFLAGS'])
    elif canonical_suffix == '.cpp':
        compiler = cenv.substs[prefix + 'CXX']
        cflags = list(flags['COMPILE_CXXFLAGS'])
        # Add the Objective-C++ flags if needed.
        if filename.endswith('.mm'):
            cflags.extend(flags['COMPILE_CMMFLAGS'])
    else:
        # Not a compilable C-family source; nothing to record.
        return
    # '-o /dev/null -c': the recorded command is for tooling, not execution.
    cmd = compiler.split() + ['-o', '/dev/null', '-c'] + cflags + [filename]
    self._db.append({
        'directory': objdir,
        'command': ' '.join(shell_quote(a) for a in cmd),
        'file': filename
    })
def _process_generated_file(self, backend_file, obj):
    """Emit a tup rule that runs a GENERATED_FILES python script."""
    if obj.script and obj.method:
        backend_file.export_shell()
        cmd = self._py_action('file_generate')
        if obj.localized:
            cmd.append('--locale=en-US')
        cmd.extend([
            obj.script,
            obj.method,
            obj.outputs[0],
            '%s.pp' % obj.outputs[0],  # deps file required
            'unused',  # deps target is required
        ])
        full_inputs = [f.full_path for f in obj.inputs]
        cmd.extend(full_inputs)
        cmd.extend(shell_quote(f) for f in obj.flags)

        outputs = []
        outputs.extend(obj.outputs)
        # The deps .pp file is produced alongside the declared outputs.
        outputs.append('%s.pp' % obj.outputs[0])

        # Some outputs need extra environment values exported to the rule.
        extra_exports = {
            'buildid.h': ['MOZ_BUILD_DATE'],
        }
        for f in obj.outputs:
            exports = extra_exports.get(f)
            if exports:
                backend_file.export(exports)

        if any(
                f.endswith(('automation.py', 'source-repo.h', 'buildid.h'))
                for f in obj.outputs):
            # These early files feed everything else; they must not depend
            # on the early-generated-files group themselves.
            output_group = self._early_generated_files
        else:
            output_group = self._installed_files if obj.required_for_compile else None
            full_inputs += [self._early_generated_files]

        extra_inputs = []
        # Fix: the gtest entry was previously misspelled
        # ('dependendentlibs.list.gtest'), so it never matched obj.outputs
        # and the shared-libs prerequisite was silently skipped for gtest.
        if any(f in obj.outputs for f in ('dependentlibs.list',
                                          'dependentlibs.list.gtest')):
            extra_inputs += [self._shlibs]

        # Keep the rule display line short for long output lists.
        if len(outputs) > 3:
            display_outputs = ', '.join(outputs[0:3]) + ', ...'
        else:
            display_outputs = ', '.join(outputs)
        display = 'python {script}:{method} -> [{display_outputs}]'.format(
            script=obj.script, method=obj.method,
            display_outputs=display_outputs)
        backend_file.rule(
            display=display,
            cmd=cmd,
            inputs=full_inputs,
            extra_inputs=extra_inputs,
            outputs=outputs,
            output_group=output_group,
            check_unchanged=True,
        )
def consume_finished(self):
    """Write manifests, flush delayed rules, emit Tuprules.tup, run tup init."""
    CommonBackend.consume_finished(self)

    # The approach here is similar to fastermake.py, but we
    # simply write out the resulting files here.
    for target, entries in self._manifest_entries.iteritems():
        with self._write_file(
                mozpath.join(self.environment.topobjdir, target)) as fh:
            fh.write(''.join('%s\n' % e for e in sorted(entries)))

    # Generated files are processed last so all rule data is available.
    for objdir, backend_file in sorted(self._backend_files.items()):
        for obj in backend_file.delayed_generated_files:
            self._process_generated_file(backend_file, obj)
        with self._write_file(fh=backend_file):
            pass

    with self._write_file(
            mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh:
        # Global (non-directory-specific) defines become ACDEFINES.
        acdefines = [
            name for name in self.environment.defines
            if not name in self.environment.non_global_defines
        ]
        acdefines_flags = ' '.join([
            '-D%s=%s' % (name, shell_quote(self.environment.defines[name]))
            for name in sorted(acdefines)
        ])
        # TODO: AB_CD only exists in Makefiles at the moment.
        acdefines_flags += ' -DAB_CD=en-US'
        # TODO: BOOKMARKS_INCLUDE_DIR is used by bookmarks.html.in, and is
        # only defined in browser/locales/Makefile.in
        acdefines_flags += ' -DBOOKMARKS_INCLUDE_DIR=%s/browser/locales/en-US/profile' % self.environment.topsrcdir
        # Use BUILD_FASTER to avoid CXXFLAGS/CPPFLAGS in
        # toolkit/content/buildconfig.html
        acdefines_flags += ' -DBUILD_FASTER=1'

        fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n')
        fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n')
        fh.write('ACDEFINES = %s\n' % acdefines_flags)
        fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % (os.path.relpath(
            self.environment.topsrcdir, self.environment.topobjdir)))
        fh.write('PYTHON = $(MOZ_OBJ_ROOT)/_virtualenv/bin/python -B\n')
        fh.write(
            'PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py\n')
        fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n')
        fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n')
        fh.write(
            'IDL_PARSER_CACHE_DIR = $(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n'
        )

    # Run 'tup init' if necessary.
    if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")):
        tup = self.environment.substs.get('TUP', 'tup')
        self._cmd.run_process(cwd=self.environment.topsrcdir, log_name='tup',
                              args=[tup, 'init'])
def _process_generated_file(self, backend_file, obj):
    """Emit a tup rule that runs a GENERATED_FILES python script."""
    # TODO: These are directories that don't work in the tup backend
    # yet, because things they depend on aren't built yet.
    skip_directories = (
        'toolkit/library',  # libxul.so
    )
    if obj.script and obj.method and obj.relobjdir not in skip_directories:
        backend_file.export_shell()
        cmd = self._py_action('file_generate')
        if obj.localized:
            cmd.append('--locale=en-US')
        cmd.extend([
            obj.script,
            obj.method,
            obj.outputs[0],
            '%s.pp' % obj.outputs[0],  # deps file required
            'unused',  # deps target is required
        ])
        full_inputs = [f.full_path for f in obj.inputs]
        cmd.extend(full_inputs)
        cmd.extend(shell_quote(f) for f in obj.flags)

        outputs = []
        outputs.extend(obj.outputs)
        # The deps .pp file is produced alongside the declared outputs.
        outputs.append('%s.pp' % obj.outputs[0])

        # Some outputs need extra environment values exported to the rule.
        extra_exports = {
            'buildid.h': ['MOZ_BUILD_DATE'],
        }
        for f in obj.outputs:
            exports = extra_exports.get(f)
            if exports:
                backend_file.export(exports)

        if any(
                f.endswith(('automation.py', 'source-repo.h', 'buildid.h'))
                for f in obj.outputs):
            # Early files feed everything else; they must not depend on the
            # early-generated-files group themselves.
            extra_outputs = [self._early_generated_files]
        else:
            extra_outputs = [self._installed_files
                             ] if obj.required_for_compile else []
            full_inputs += [self._early_generated_files]

        # Keep the rule display line short for long output lists.
        if len(outputs) > 3:
            display_outputs = ', '.join(outputs[0:3]) + ', ...'
        else:
            display_outputs = ', '.join(outputs)
        display = 'python {script}:{method} -> [{display_outputs}]'.format(
            script=obj.script, method=obj.method,
            display_outputs=display_outputs)
        backend_file.rule(
            display=display,
            cmd=cmd,
            inputs=full_inputs,
            outputs=outputs,
            extra_outputs=extra_outputs,
            check_unchanged=True,
        )
def consume_finished(self):
    """Finish the build and write the clangd compilation database.

    Expands each recorded command, re-joins quoted words that plain split()
    would break apart, and dumps the result as JSON.
    """
    CommonBackend.consume_finished(self)

    db = []

    for (directory, filename, unified), cmd in self._db.items():
        env = self._envs[directory]
        cmd = self._build_cmd(cmd, filename, unified)
        variables = {
            "DIST": mozpath.join(env.topobjdir, "dist"),
            "DEPTH": env.topobjdir,
            "MOZILLA_DIR": env.topsrcdir,
            "topsrcdir": env.topsrcdir,
            "topobjdir": env.topobjdir,
        }
        variables.update(self._local_flags[directory])
        c = []
        for a in cmd:
            accum = ""
            for word in expand_variables(a, variables).split():
                # We can't just split() the output of expand_variables since
                # there can be spaces enclosed by quotes, e.g. '"foo bar"'.
                # Handle that case by checking whether there are an even
                # number of double-quotes in the word and appending it to
                # the accumulator if not. Meanwhile, shlex.split() and
                # mozbuild.shellutil.split() aren't able to properly handle
                # this and break in various ways, so we can't use something
                # off-the-shelf.
                has_quote = bool(word.count('"') % 2)
                if accum and has_quote:
                    c.append(accum + " " + word)
                    accum = ""
                elif accum and not has_quote:
                    accum += " " + word
                elif not accum and has_quote:
                    accum = word
                else:
                    c.append(word)

        # Tell clangd to keep parsing to the end of a file, regardless of
        # how many errors are encountered. (Unified builds mean that we
        # encounter a lot of errors parsing some files.)
        c.insert(-1, "-ferror-limit=0")

        per_source_flags = self._per_source_flags.get(filename)
        if per_source_flags is not None:
            c.extend(per_source_flags)
        db.append(
            {
                "directory": directory,
                "command": " ".join(shell_quote(a) for a in c),
                "file": mozpath.join(directory, filename),
            }
        )

    import json

    outputfile = self._outputfile_path()
    with self._write_file(outputfile) as jsonout:
        json.dump(db, jsonout, indent=0)
def consume_finished(self):
    """Finish the build and write objdir/compile_commands.json."""
    CommonBackend.consume_finished(self)

    db = []

    for (directory, filename, unified), cmd in self._db.iteritems():
        env = self._envs[directory]
        cmd = list(cmd)
        # Compile the unified wrapper when there is one, otherwise the
        # source file itself.
        if unified is None:
            cmd.append(filename)
        else:
            cmd.append(unified)
        local_extra = list(self._extra_includes[directory])
        if directory not in self._gyp_dirs:
            # Non-gyp directories implicitly pick up the in-tree library
            # flags when the corresponding substs are set.
            for var in (
                'NSPR_CFLAGS',
                'NSS_CFLAGS',
                'MOZ_JPEG_CFLAGS',
                'MOZ_PNG_CFLAGS',
                'MOZ_ZLIB_CFLAGS',
                'MOZ_PIXMAN_CFLAGS',
            ):
                f = env.substs.get(var)
                if f:
                    local_extra.extend(f)
        variables = {
            'LOCAL_INCLUDES': self._includes[directory],
            'DEFINES': self._defines[directory],
            'EXTRA_INCLUDES': local_extra,
            'DIST': mozpath.join(env.topobjdir, 'dist'),
            'DEPTH': env.topobjdir,
            'MOZILLA_DIR': env.topsrcdir,
            'topsrcdir': env.topsrcdir,
            'topobjdir': env.topobjdir,
        }
        variables.update(self._local_flags[directory])
        c = []
        for a in cmd:
            # NOTE(review): expand_variables(...).split() always returns a
            # list, so the StringTypes branch below appears unreachable —
            # confirm and simplify.
            a = expand_variables(a, variables).split()
            if not a:
                continue
            if isinstance(a, types.StringTypes):
                c.append(a)
            else:
                c.extend(a)
        per_source_flags = self._per_source_flags.get(filename)
        if per_source_flags is not None:
            c.extend(per_source_flags)
        db.append({
            'directory': directory,
            'command': ' '.join(shell_quote(a) for a in c),
            'file': filename,
        })

    import json
    # Output the database (a JSON file) to objdir/compile_commands.json
    outputfile = os.path.join(self.environment.topobjdir, 'compile_commands.json')
    with self._write_file(outputfile) as jsonout:
        json.dump(db, jsonout, indent=0)
def consume_finished(self): CommonBackend.consume_finished(self) # The approach here is similar to fastermake.py, but we # simply write out the resulting files here. for target, entries in self._manifest_entries.iteritems(): with self._write_file(mozpath.join(self.environment.topobjdir, target)) as fh: fh.write(''.join('%s\n' % e for e in sorted(entries))) if self._built_in_addons: with self._write_file(mozpath.join(self.environment.topobjdir, self._built_in_addons_file)) as fh: json.dump({'system': sorted(list(self._built_in_addons))}, fh) for objdir, backend_file in sorted(self._backend_files.items()): backend_file.gen_sources_rules([self._installed_files]) for var, gen_method in ((backend_file.shared_lib, self._gen_shared_library), (backend_file.static_lib and backend_file.static_lib.no_expand_lib, self._gen_static_library), (backend_file.programs, self._gen_programs), (backend_file.host_programs, self._gen_host_programs), (backend_file.host_library, self._gen_host_library)): if var: backend_file.export_shell() gen_method(backend_file) for obj in backend_file.delayed_generated_files: self._process_generated_file(backend_file, obj) for path, output, output_group in backend_file.delayed_installed_files: backend_file.symlink_rule(path, output=output, output_group=output_group) with self._write_file(fh=backend_file): pass with self._write_file(mozpath.join(self.environment.topobjdir, 'Tuprules.tup')) as fh: acdefines_flags = ' '.join(['-D%s=%s' % (name, shell_quote(value)) for (name, value) in sorted(self.environment.acdefines.iteritems())]) # TODO: AB_CD only exists in Makefiles at the moment. 
acdefines_flags += ' -DAB_CD=en-US' # Use BUILD_FASTER to avoid CXXFLAGS/CPPFLAGS in # toolkit/content/buildconfig.html acdefines_flags += ' -DBUILD_FASTER=1' fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n') fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n') fh.write('ACDEFINES = %s\n' % acdefines_flags) fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % ( os.path.relpath(self.environment.topsrcdir, self.environment.topobjdir) )) fh.write('PYTHON = PYTHONDONTWRITEBYTECODE=1 %s\n' % self.environment.substs['PYTHON']) fh.write('PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py\n') fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n') fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n') fh.write('IDL_PARSER_CACHE_DIR = $(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n') # Run 'tup init' if necessary. if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")): tup = self.environment.substs.get('TUP', 'tup') self._cmd.run_process(cwd=self.environment.topsrcdir, log_name='tup', args=[tup, 'init'])
def __init__(self, topsrcdir, topobjdir, defines=None, substs=None, source=None, mozconfig=None):
    """Capture a build configuration (config.status values).

    Stores defines/substs and derives platform naming conventions
    (lib/dll/import/bin prefixes and suffixes), plus the aggregate
    ACDEFINES/ALLSUBSTS/ALLEMPTYSUBSTS substs.
    """
    if not source:
        source = mozpath.join(topobjdir, 'config.status')
    self.source = source
    self.defines = ReadOnlyDict(defines or {})
    # substs stays mutable until the derived entries below are added.
    self.substs = dict(substs or {})
    self.topsrcdir = mozpath.abspath(topsrcdir)
    self.topobjdir = mozpath.abspath(topobjdir)
    self.mozconfig = mozpath.abspath(mozconfig) if mozconfig else None
    self.lib_prefix = self.substs.get('LIB_PREFIX', '')
    self.rust_lib_prefix = self.substs.get('RUST_LIB_PREFIX', '')
    # Suffix attributes are only set when the substs provide them.
    if 'LIB_SUFFIX' in self.substs:
        self.lib_suffix = '.%s' % self.substs['LIB_SUFFIX']
    if 'RUST_LIB_SUFFIX' in self.substs:
        self.rust_lib_suffix = '.%s' % self.substs['RUST_LIB_SUFFIX']
    self.dll_prefix = self.substs.get('DLL_PREFIX', '')
    self.dll_suffix = self.substs.get('DLL_SUFFIX', '')
    self.host_dll_prefix = self.substs.get('HOST_DLL_PREFIX', '')
    self.host_dll_suffix = self.substs.get('HOST_DLL_SUFFIX', '')
    # Import libraries fall back to DLL naming when IMPORT_LIB_SUFFIX is
    # unset or empty.
    if self.substs.get('IMPORT_LIB_SUFFIX'):
        self.import_prefix = self.lib_prefix
        self.import_suffix = '.%s' % self.substs['IMPORT_LIB_SUFFIX']
    else:
        self.import_prefix = self.dll_prefix
        self.import_suffix = self.dll_suffix
    self.bin_suffix = self.substs.get('BIN_SUFFIX', '')

    global_defines = [name for name in self.defines]
    # '$' is doubled so make does not treat it as a variable reference.
    self.substs["ACDEFINES"] = ' '.join([
        '-D%s=%s' % (name,
                     shell_quote(self.defines[name]).replace('$', '$$'))
        for name in sorted(global_defines)
    ])

    def serialize(name, obj):
        # Flatten a substs value to a single string for ALLSUBSTS.
        if isinstance(obj, six.string_types):
            return obj
        if isinstance(obj, Iterable):
            return ' '.join(obj)
        raise Exception('Unhandled type %s for %s', type(obj), str(name))

    self.substs['ALLSUBSTS'] = '\n'.join(
        sorted([
            '%s = %s' % (name, serialize(name, self.substs[name]))
            for name in self.substs if self.substs[name]
        ]))
    self.substs['ALLEMPTYSUBSTS'] = '\n'.join(
        sorted([
            '%s =' % name for name in self.substs if not self.substs[name]
        ]))

    self.substs = ReadOnlyDict(self.substs)
def get_defines(self):
    """Yield preprocessor command-line flags for each entry in self.defines.

    ``True`` values become bare ``-DNAME`` flags, ``False`` values become
    ``-UNAME`` flags, and anything else becomes ``-DNAME=value`` with the
    value shell-quoted.
    """
    for name, val in self.defines.iteritems():
        if val is False:
            yield '-U%s' % name
        elif val is True:
            yield '-D%s' % name
        else:
            yield '-D%s=%s' % (name, shell_quote(val))
def windows_toolchain(config, job, taskdesc):
    """Adapt a Windows toolchain job for execution via run-task.

    Mutates ``taskdesc`` in place: installs the job's worker payload,
    default artifacts, env vars, cache entry and final command, then
    delegates to ``configure_taskdesc_for_run``.
    """
    run = job["run"]

    worker = taskdesc["worker"] = job["worker"]

    # Allow the job to specify where artifacts come from.
    worker.setdefault(
        "artifacts",
        [{
            "path": r"public\build",
            "type": "directory",
        }],
    )

    worker["chain-of-trust"] = True

    # There were no caches on generic-worker before bug 1519472, and they cause
    # all sorts of problems with toolchain tasks, disable them until
    # tasks are ready.
    run["use-caches"] = False

    env = worker.setdefault("env", {})
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
    })

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run["script"].endswith(".py"):
        raise NotImplementedError("Python scripts don't work on Windows")

    args = run.get("arguments", "")
    if args:
        # shell_quote(*args) quotes each argument individually.
        args = " " + shell_quote(*args)

    # Expose toolchain metadata as task attributes; pop so the keys don't
    # leak into the generic run-task configuration below.
    attributes = taskdesc.setdefault("attributes", {})
    attributes["toolchain-artifact"] = run.pop("toolchain-artifact")
    if "toolchain-alias" in run:
        attributes["toolchain-alias"] = run.pop("toolchain-alias")

    if not taskgraph.fast:
        # Cache name is the label with the leading "<kind>-" prefix removed.
        name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)
        taskdesc["cache"] = {
            "type": CACHE_TYPE,
            "name": name,
            "digest-data": get_digest_data(config, run, taskdesc),
        }

    bash = r"c:\mozilla-build\msys\bin\bash"

    run["using"] = "run-task"
    run["command"] = [
        # do something intelligent.
        r"{} build/src/taskcluster/scripts/misc/{}{}".format(
            bash, run.pop("script"), args)
    ]
    # The arguments are already baked into the command string above.
    run.pop("arguments", None)
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def consume_finished(self):
    """Write the accumulated compile commands to compile_commands.json.

    Expands make-style variables in each recorded command and emits a
    clang ``compile_commands.json`` compilation database at the top of
    the object directory.
    """
    CommonBackend.consume_finished(self)

    db = []

    for (directory, filename, unified), cmd in self._db.iteritems():
        env = self._envs[directory]
        cmd = list(cmd)
        # Compile the unified file when one exists, the raw source otherwise.
        if unified is None:
            cmd.append(filename)
        else:
            cmd.append(unified)
        variables = {
            'DIST': mozpath.join(env.topobjdir, 'dist'),
            'DEPTH': env.topobjdir,
            'MOZILLA_DIR': env.topsrcdir,
            'topsrcdir': env.topsrcdir,
            'topobjdir': env.topobjdir,
        }
        variables.update(self._local_flags[directory])
        c = []
        for a in cmd:
            accum = ''
            for word in expand_variables(a, variables).split():
                # We can't just split() the output of expand_variables since
                # there can be spaces enclosed by quotes, e.g. '"foo bar"'.
                # Handle that case by checking whether there are an even
                # number of double-quotes in the word and appending it to
                # the accumulator if not. Meanwhile, shlex.split() and
                # mozbuild.shellutil.split() aren't able to properly handle
                # this and break in various ways, so we can't use something
                # off-the-shelf.
                has_quote = bool(word.count('"') % 2)
                if accum and has_quote:
                    # Closing quote: flush the rejoined quoted span.
                    c.append(accum + ' ' + word)
                    accum = ''
                elif accum and not has_quote:
                    accum += ' ' + word
                elif not accum and has_quote:
                    # Opening quote: start accumulating.
                    accum = word
                else:
                    c.append(word)
            # NOTE(review): if a quoted span is still open when the words
            # run out, `accum` is silently dropped here — confirm inputs
            # always have balanced quotes per expanded argument.

        per_source_flags = self._per_source_flags.get(filename)
        if per_source_flags is not None:
            c.extend(per_source_flags)
        db.append({
            'directory': directory,
            'command': ' '.join(shell_quote(a) for a in c),
            'file': mozpath.join(directory, filename),
        })

    import json
    # Output the database (a JSON file) to objdir/compile_commands.json
    outputfile = os.path.join(self.environment.topobjdir, 'compile_commands.json')
    with self._write_file(outputfile) as jsonout:
        json.dump(db, jsonout, indent=0)
def windows_toolchain(config, job, taskdesc):
    """Adapt a Windows toolchain job that checks out the tree itself.

    Mutates ``taskdesc`` in place: configures artifacts, env vars, an hg
    checkout command followed by the toolchain script, toolchain
    attributes, and (unless ``taskgraph.fast``) cache-based optimization.
    """
    run = job['run']

    worker = taskdesc['worker']

    worker['artifacts'] = [{
        'path': r'public\build',
        'type': 'directory',
    }]
    worker['chain-of-trust'] = True

    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    # First (and only) command generic_worker_hg_commands returns: clone /
    # update mozilla-unified into .\build\src at the task's revision.
    hg_command = generic_worker_hg_commands(
        'https://hg.mozilla.org/mozilla-unified',
        env['GECKO_HEAD_REPOSITORY'],
        env['GECKO_HEAD_REV'],
        r'.\build\src')[0]

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        raise NotImplementedError("Python scripts don't work on Windows")

    args = run.get('arguments', '')
    if args:
        # shell_quote(*args) quotes each argument individually.
        args = ' ' + shell_quote(*args)

    bash = r'c:\mozilla-build\msys\bin\bash'
    worker['command'] = [
        hg_command,
        # do something intelligent.
        r'{} build/src/taskcluster/scripts/misc/{}{}'.format(
            bash, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        # Cache name is the label with the leading "<kind>-" prefix removed.
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config, taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
def test_check_prog(self):
    """Exercise check_prog() lookup over a list of candidate program names.

    Covers: first candidate found; later candidate found after misses;
    a candidate containing a space (result goes through fake_short_path);
    no candidate found (error, exit status 1); and allow_missing=True
    (no error, empty config).
    """
    config, out, status = self.get_result(
        'check_prog("FOO", ("known-a",))')
    self.assertEqual(status, 0)
    self.assertEqual(config, {"FOO": self.KNOWN_A})
    self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A)

    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "known-b", "known c"))')
    self.assertEqual(status, 0)
    self.assertEqual(config, {"FOO": self.KNOWN_B})
    self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_B)

    # "known c" contains a space, so the reported path is shortened and
    # shell-quoted in the output.
    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "unknown-2", "known c"))')
    self.assertEqual(status, 0)
    self.assertEqual(config, {"FOO": fake_short_path(self.KNOWN_C)})
    self.assertEqual(
        out, "checking for foo... %s\n" % shell_quote(fake_short_path(self.KNOWN_C)))

    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown",))')
    self.assertEqual(status, 1)
    self.assertEqual(config, {})
    self.assertEqual(
        out,
        textwrap.dedent("""\
        checking for foo... not found
        DEBUG: foo: Looking for unknown
        ERROR: Cannot find foo
    """),
    )

    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"))')
    self.assertEqual(status, 1)
    self.assertEqual(config, {})
    self.assertEqual(
        out,
        textwrap.dedent("""\
        checking for foo... not found
        DEBUG: foo: Looking for unknown
        DEBUG: foo: Looking for unknown-2
        DEBUG: foo: Looking for 'unknown 3'
        ERROR: Cannot find foo
    """),
    )

    # allow_missing=True: failure to find is not fatal and sets nothing.
    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), '
        "allow_missing=True)")
    self.assertEqual(status, 0)
    self.assertEqual(config, {})
    self.assertEqual(out, "checking for foo... not found\n")
def windows_toolchain(config, job, taskdesc):
    """Adapt a Windows toolchain job for execution via run-task.

    Mutates ``taskdesc`` in place: installs the job's worker payload,
    default artifacts, env vars, cache entry and final command, then
    delegates to ``configure_taskdesc_for_run``.
    """
    run = job['run']

    worker = taskdesc['worker'] = job['worker']

    # Allow the job to specify where artifacts come from.
    worker.setdefault('artifacts', [{
        'path': r'public\build',
        'type': 'directory',
    }])

    worker['chain-of-trust'] = True

    # There were no caches on generic-worker before bug 1519472, and they cause
    # all sorts of problems with toolchain tasks, disable them until
    # tasks are ready.
    run['use-caches'] = False

    env = worker.setdefault('env', {})
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        raise NotImplementedError("Python scripts don't work on Windows")

    args = run.get('arguments', '')
    if args:
        # shell_quote(*args) quotes each argument individually.
        args = ' ' + shell_quote(*args)

    # Expose toolchain metadata as task attributes; pop so the keys don't
    # leak into the generic run-task configuration below.
    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run.pop('toolchain-artifact')
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run.pop('toolchain-alias')

    if not taskgraph.fast:
        # Cache name is the label with the leading "<kind>-" prefix removed.
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        taskdesc['cache'] = {
            'type': CACHE_TYPE,
            'name': name,
            'digest-data': get_digest_data(config, run, taskdesc),
        }

    bash = r'c:\mozilla-build\msys\bin\bash'

    run['using'] = 'run-task'
    run['command'] = [
        # do something intelligent.
        r'{} build/src/taskcluster/scripts/misc/{}{}'.format(
            bash, run.pop('script'), args)
    ]
    # The arguments are already baked into the command string above.
    run.pop('arguments', None)
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
def _process_generated_file(self, backend_file, obj):
    """Emit a tup rule that runs a GENERATED_FILES python script.

    The rule invokes the in-tree ``file_generate`` action with the
    object's script/method, declares the generated outputs plus the
    ``.pp`` deps file, and wires up dependencies on the early-generated
    files group where appropriate.
    """
    # TODO: These are directories that don't work in the tup backend
    # yet, because things they depend on aren't built yet.
    skip_directories = (
        'layout/style/test', # HostSimplePrograms
        'toolkit/library', # libxul.so
    )
    if obj.script and obj.method and obj.relobjdir not in skip_directories:
        backend_file.export_shell()
        cmd = self._py_action('file_generate')
        if obj.localized:
            cmd.append('--locale=en-US')
        cmd.extend([
            obj.script,
            obj.method,
            obj.outputs[0],
            '%s.pp' % obj.outputs[0], # deps file required
        ])
        full_inputs = [f.full_path for f in obj.inputs]
        cmd.extend(full_inputs)
        cmd.extend(shell_quote(f) for f in obj.flags)

        outputs = []
        outputs.extend(obj.outputs)
        outputs.append('%s.pp' % obj.outputs[0])

        # Some outputs need extra environment variables exported into the
        # Tupfile (e.g. buildid.h embeds the build date).
        extra_exports = {
            'buildid.h': ['MOZ_BUILD_DATE'],
        }
        for f in obj.outputs:
            exports = extra_exports.get(f)
            if exports:
                backend_file.export(exports)

        if any(f in obj.outputs for f in ('source-repo.h', 'buildid.h')):
            # These ARE the early generated files; they feed the group.
            extra_outputs = [self._early_generated_files]
        else:
            extra_outputs = [self._installed_files] if obj.required_for_compile else []
            # Everything else is ordered after the early generated files.
            # (Safe to append here: cmd.extend() above already copied the
            # earlier contents of full_inputs.)
            full_inputs += [self._early_generated_files]

        backend_file.rule(
            display='python {script}:{method} -> [%o]'.format(
                script=obj.script, method=obj.method),
            cmd=cmd,
            inputs=full_inputs,
            outputs=outputs,
            extra_outputs=extra_outputs,
        )
def test_check_prog_with_args(self):
    """Exercise check_prog() when the program is forced via FOO=... args.

    Covers: overriding by name and by full path; a forced path that does
    not exist (fatal, even though other candidates would match); a forced
    value containing a space; and that allow_missing=True does NOT rescue
    an explicitly forced-but-missing program.
    """
    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "known-b", "known c"))',
        ['FOO=known-a'])
    self.assertEqual(status, 0)
    self.assertEqual(config, {'FOO': self.KNOWN_A})
    self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)

    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "known-b", "known c"))',
        ['FOO=%s' % self.KNOWN_A])
    self.assertEqual(status, 0)
    self.assertEqual(config, {'FOO': self.KNOWN_A})
    self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)

    # A forced path that doesn't exist is fatal: only the given value is
    # tried, never the declared candidates.
    path = self.KNOWN_B.replace('known-b', 'known-a')
    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "known-b", "known c"))',
        ['FOO=%s' % path])
    self.assertEqual(status, 1)
    self.assertEqual(config, {})
    self.assertEqual(
        out,
        textwrap.dedent('''\
        checking for foo... not found
        DEBUG: foo: Trying %s
        ERROR: Cannot find foo
    ''') % path)

    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown",))',
        ['FOO=known c'])
    self.assertEqual(status, 0)
    self.assertEqual(config, {'FOO': fake_short_path(self.KNOWN_C)})
    self.assertEqual(
        out,
        "checking for foo... %s\n" % shell_quote(fake_short_path(self.KNOWN_C)))

    # allow_missing only applies to the candidate search, not to an
    # explicitly provided value.
    config, out, status = self.get_result(
        'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), '
        'allow_missing=True)',
        ['FOO=unknown'])
    self.assertEqual(status, 1)
    self.assertEqual(config, {})
    self.assertEqual(
        out,
        textwrap.dedent('''\
        checking for foo... not found
        DEBUG: foo: Trying unknown
        ERROR: Cannot find foo
    '''))
def consume_finished(self):
    """Finish the tup backend: write manifests, Tupfiles and Tuprules.tup.

    Writes each accumulated manifest, flushes every per-objdir backend
    file (emitting delayed GENERATED_FILES and source-compilation rules),
    writes the top-level Tuprules.tup with ACDEFINES and tool variables,
    and finally runs ``tup init`` if the database doesn't exist yet.
    """
    CommonBackend.consume_finished(self)

    # The approach here is similar to fastermake.py, but we
    # simply write out the resulting files here.
    for target, entries in self._manifest_entries.iteritems():
        with self._write_file(mozpath.join(self.environment.topobjdir,
                                           target)) as fh:
            fh.write(''.join('%s\n' % e for e in sorted(entries)))

    for objdir, backend_file in sorted(self._backend_files.items()):
        for obj in backend_file.delayed_generated_files:
            self._process_generated_file(backend_file, obj)
        backend_file.gen_sources_rules([self._installed_files])
        # Flush the backend file itself to disk.
        with self._write_file(fh=backend_file):
            pass

    with self._write_file(mozpath.join(self.environment.topobjdir,
                                       'Tuprules.tup')) as fh:
        acdefines_flags = ' '.join(['-D%s=%s' % (name, shell_quote(value))
            for (name, value) in sorted(self.environment.acdefines.iteritems())])
        # TODO: AB_CD only exists in Makefiles at the moment.
        acdefines_flags += ' -DAB_CD=en-US'

        # TODO: BOOKMARKS_INCLUDE_DIR is used by bookmarks.html.in, and is
        # only defined in browser/locales/Makefile.in
        acdefines_flags += ' -DBOOKMARKS_INCLUDE_DIR=%s/browser/locales/en-US/profile' % self.environment.topsrcdir

        # Use BUILD_FASTER to avoid CXXFLAGS/CPPFLAGS in
        # toolkit/content/buildconfig.html
        acdefines_flags += ' -DBUILD_FASTER=1'

        fh.write('MOZ_OBJ_ROOT = $(TUP_CWD)\n')
        fh.write('DIST = $(MOZ_OBJ_ROOT)/dist\n')
        fh.write('ACDEFINES = %s\n' % acdefines_flags)
        fh.write('topsrcdir = $(MOZ_OBJ_ROOT)/%s\n' % (
            os.path.relpath(self.environment.topsrcdir, self.environment.topobjdir)
        ))
        fh.write('PYTHON = $(MOZ_OBJ_ROOT)/_virtualenv/bin/python -B\n')
        fh.write('PYTHON_PATH = $(PYTHON) $(topsrcdir)/config/pythonpath.py\n')
        fh.write('PLY_INCLUDE = -I$(topsrcdir)/other-licenses/ply\n')
        fh.write('IDL_PARSER_DIR = $(topsrcdir)/xpcom/idl-parser\n')
        fh.write('IDL_PARSER_CACHE_DIR = $(MOZ_OBJ_ROOT)/xpcom/idl-parser/xpidl\n')

    # Run 'tup init' if necessary.
    if not os.path.exists(mozpath.join(self.environment.topsrcdir, ".tup")):
        tup = self.environment.substs.get('TUP', 'tup')
        self._cmd.run_process(cwd=self.environment.topsrcdir,
                              log_name='tup',
                              args=[tup, 'init'])
def consume_finished(self):
    """Write the accumulated compile commands to compile_commands.json.

    Expands make-style variables in each recorded command and emits a
    clang ``compile_commands.json`` compilation database at the top of
    the object directory.
    """
    CommonBackend.consume_finished(self)

    db = []

    for (directory, filename, unified), cmd in self._db.iteritems():
        env = self._envs[directory]
        cmd = list(cmd)
        # Compile the unified file when one exists, the raw source otherwise.
        if unified is None:
            cmd.append(filename)
        else:
            cmd.append(unified)
        variables = {
            'DIST': mozpath.join(env.topobjdir, 'dist'),
            'DEPTH': env.topobjdir,
            'MOZILLA_DIR': env.topsrcdir,
            'topsrcdir': env.topsrcdir,
            'topobjdir': env.topobjdir,
        }
        variables.update(self._local_flags[directory])
        c = []
        for a in cmd:
            # str.split() always returns a list, so the previous
            # isinstance(a, types.StringTypes) branch was unreachable dead
            # code; extending with the (possibly empty) word list is
            # equivalent and simpler.
            c.extend(expand_variables(a, variables).split())
        per_source_flags = self._per_source_flags.get(filename)
        if per_source_flags is not None:
            c.extend(per_source_flags)
        db.append({
            'directory': directory,
            'command': ' '.join(shell_quote(a) for a in c),
            'file': mozpath.join(directory, filename),
        })

    import json
    # Output the database (a JSON file) to objdir/compile_commands.json
    outputfile = os.path.join(self.environment.topobjdir, 'compile_commands.json')
    with self._write_file(outputfile) as jsonout:
        json.dump(db, jsonout, indent=0)
def _preprocess(self, backend_file, input_file, destdir=None):
    """Emit a tup rule that runs the in-tree preprocessor over input_file.

    The output is the input's basename (with a trailing ``.in`` stripped),
    placed under ``destdir`` when one is given.
    """
    # .css files use '%' as their preprocessor marker, and '%' is special
    # in Tupfiles, so it must be escaped there as '%%'.
    if input_file.endswith('.css'):
        marker = '%%'
    else:
        marker = '#'

    cmd = self._py_action('preprocessor')
    for define in backend_file.defines:
        cmd.append(shell_quote(define))
    cmd += ['$(ACDEFINES)', '%f', '-o', '%o', '--marker=%s' % marker]

    name = mozpath.basename(input_file)
    if name.endswith('.in'):
        name = mozpath.splitext(name)[0]
    if destdir:
        output = mozpath.join(destdir, name)
    else:
        output = name

    backend_file.rule(
        cmd=cmd,
        inputs=[input_file],
        outputs=[output],
        display='Preprocess %o',
    )
def write_vars(self, config):
    """Fill the substs/defines groups from a parsed config dict.

    Computes the ACDEFINES subst (command-line -D flags for all global
    defines) and the ALLDEFINES pseudo-define (an ordered mapping of the
    global defines), then hands both dicts to their groups.
    """
    substs = config['substs'].copy()
    defines = config['defines'].copy()

    non_global = config['non_global_defines']
    global_defines = [d for d in config['defines'] if d not in non_global]

    # Build the -D flag string; '$' is doubled so the value survives
    # make expansion.
    flags = []
    for name in sorted(global_defines):
        value = shell_quote(config['defines'][name]).replace('$', '$$')
        flags.append('-D%s=%s' % (name, value))
    substs['ACDEFINES'] = ' '.join(flags)

    # Preserve the original define ordering for ALLDEFINES.
    defines['ALLDEFINES'] = OrderedDict(
        (name, config['defines'][name]) for name in global_defines)

    self.substs._fill_group(substs)
    self.defines._fill_group(defines)
def docker_worker_toolchain(config, job, taskdesc):
    """Adapt a toolchain job for execution on a docker-worker.

    Mutates ``taskdesc`` in place: configures artifacts, VCS checkout
    (sparse), env vars, optional tooltool access, the run-task command
    invoking the toolchain script, toolchain attributes, and (unless
    ``taskgraph.fast``) cache-based optimization.
    """
    run = job['run']

    worker = taskdesc['worker']
    worker['chain-of-trust'] = True

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(artifact.get('name') == 'public/build' for artifact in artifacts):
        docker_worker_add_public_artifacts(config, job, taskdesc)

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc, sparse=True)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
    })

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        wrapper = 'workspace/build/src/mach python '
    else:
        wrapper = ''

    args = run.get('arguments', '')
    if args:
        # shell_quote(*args) quotes each argument individually.
        args = ' ' + shell_quote(*args)

    sparse_profile = []
    if run.get('sparse-profile'):
        sparse_profile = ['--sparse-profile',
                          'build/sparse-profiles/{}'.format(run['sparse-profile'])]

    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout=/builds/worker/workspace/build/src',
    ] + sparse_profile + [
        '--',
        'bash',
        '-c',
        'cd /builds/worker && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            wrapper, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        # Cache name is the label with the leading "<kind>-" prefix removed.
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config, taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
def __init__(self, topsrcdir, topobjdir, defines=None,
             non_global_defines=None, substs=None, source=None,
             mozconfig=None):
    """Initialize a build-configuration environment (Python 2 variant).

    Args:
        topsrcdir: path to the top of the source tree.
        topobjdir: path to the top of the object (build) directory.
        defines: mapping of preprocessor defines (stored read-only).
        non_global_defines: define names excluded from ACDEFINES.
        substs: mapping of configure substitutions; augmented with
            derived entries and then frozen into a ReadOnlyDict.
        source: path to config.status; defaults to
            ``<topobjdir>/config.status``.
        mozconfig: optional path to the mozconfig used, stored absolute.
    """
    if not source:
        source = mozpath.join(topobjdir, 'config.status')
    self.source = source
    self.defines = ReadOnlyDict(defines or {})
    self.non_global_defines = non_global_defines or []
    self.substs = dict(substs or {})
    self.topsrcdir = mozpath.abspath(topsrcdir)
    self.topobjdir = mozpath.abspath(topobjdir)
    self.mozconfig = mozpath.abspath(mozconfig) if mozconfig else None

    # Library/DLL naming conventions derived from the substs.
    self.lib_prefix = self.substs.get('LIB_PREFIX', '')
    if 'LIB_SUFFIX' in self.substs:
        self.lib_suffix = '.%s' % self.substs['LIB_SUFFIX']
    self.dll_prefix = self.substs.get('DLL_PREFIX', '')
    self.dll_suffix = self.substs.get('DLL_SUFFIX', '')
    # Platforms with import libraries link against those; otherwise the
    # DLL itself is the link target.
    if self.substs.get('IMPORT_LIB_SUFFIX'):
        self.import_prefix = self.lib_prefix
        self.import_suffix = '.%s' % self.substs['IMPORT_LIB_SUFFIX']
    else:
        self.import_prefix = self.dll_prefix
        self.import_suffix = self.dll_suffix

    global_defines = [name for name in self.defines
                      if not name in self.non_global_defines]
    # '$' is doubled so the value survives make expansion.
    self.substs['ACDEFINES'] = ' '.join(['-D%s=%s' % (name,
        shell_quote(self.defines[name]).replace('$', '$$'))
        for name in sorted(global_defines)])

    def serialize(obj):
        # Flatten a subst value to a single string for ALLSUBSTS.
        if isinstance(obj, StringTypes):
            return obj
        if isinstance(obj, Iterable):
            return ' '.join(obj)
        # NOTE(review): the %-placeholder is never applied here — the
        # args are stored on the Exception instead of being formatted.
        raise Exception('Unhandled type %s', type(obj))

    self.substs['ALLSUBSTS'] = '\n'.join(sorted(['%s = %s' % (name,
        serialize(self.substs[name])) for name in self.substs
        if self.substs[name]]))
    self.substs['ALLEMPTYSUBSTS'] = '\n'.join(sorted(['%s =' % name
        for name in self.substs if not self.substs[name]]))

    # Freeze: substs must not be mutated after construction.
    self.substs = ReadOnlyDict(self.substs)

    self.external_source_dir = None
    external = self.substs.get('EXTERNAL_SOURCE_DIR', '')
    if external:
        external = mozpath.normpath(external)
        # Relative external source dirs are anchored at topsrcdir.
        if not os.path.isabs(external):
            external = mozpath.join(self.topsrcdir, external)
        self.external_source_dir = mozpath.normpath(external)

    # Populate a Unicode version of substs. This is an optimization to make
    # moz.build reading faster, since each sandbox needs a Unicode version
    # of these variables and doing it over a thousand times is a hotspot
    # during sandbox execution!
    # Bug 844509 tracks moving everything to Unicode.
    self.substs_unicode = {}

    def decode(v):
        # Best-effort UTF-8 decode; returns None (implicitly) when v is
        # already unicode.
        if not isinstance(v, text_type):
            try:
                return v.decode('utf-8')
            except UnicodeDecodeError:
                return v.decode('utf-8', 'replace')

    for k, v in self.substs.items():
        if not isinstance(v, StringTypes):
            if isinstance(v, Iterable):
                # NOTE(review): the decoded sequence built here is
                # discarded — the original v is what gets stored below.
                # Presumably this should be assigned back to v; confirm
                # intent before changing.
                type(v)(decode(i) for i in v)
        elif not isinstance(v, text_type):
            v = decode(v)

        self.substs_unicode[k] = v

    self.substs_unicode = ReadOnlyDict(self.substs_unicode)