def docker_worker_mulet_simulator(config, job, taskdesc):
    """Set up a docker-worker task that builds the Mulet simulator.

    Uses the in-tree "builder" image, publishes public artifacts, adds an
    index route for the latest simulator build, and runs the job's
    configured shell command under bash.
    """
    run = job['run']
    # Index the worker directly: the original ``taskdesc.get('worker')``
    # would defer a missing-worker failure to an opaque NoneType subscript
    # below, and the function indexed taskdesc['worker'] anyway.
    worker = taskdesc['worker']

    # assumes the builder image (which contains the gecko checkout command)
    worker['docker-image'] = {"in-tree": "builder"}
    worker['taskcluster-proxy'] = False

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    # Advertise the latest simulator build in the task index.
    taskdesc.setdefault('routes', []).extend([
        'index.gecko.v1.{project}.latest.simulator.opt'.format(
            **config.params),
    ])

    # Interpolate push parameters (e.g. {project}) into the configured
    # shell command; the task-reference wrapper lets it refer to task ids.
    shell_command = run['shell-command'].format(**config.params)
    worker['command'] = [
        "/bin/bash", "-exc",
        {'task-reference': shell_command},
    ]
def docker_worker_toolchain(config, job, taskdesc):
    """Build a toolchain on docker-worker under run-task.

    Publishes the workspace artifacts directory, sets up VCS checkout and
    a persistent tooltool cache, optionally enables relengapi-proxy for
    tooltool downloads, and records index optimizations for the task.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    worker['artifacts'].append({
        'name': 'public',
        'path': '/home/worker/workspace/artifacts/',
        'type': 'directory',
    })

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
    })

    # tooltool downloads. By default we download using the API endpoint, but
    # the job can optionally request relengapi-proxy (for example when
    # downloading internal tooltool resources). So we define the tooltool
    # cache unconditionally.
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'

    # tooltool downloads
    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below

    if run['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
        # 'internal' downloads additionally need the internal scope.
        if run['tooltool-downloads'] == 'internal':
            taskdesc['scopes'].append(
                'docker-worker:relengapi-proxy:tooltool.download.internal')

    worker['command'] = [
        '/home/worker/bin/run-task',
        # Various caches/volumes are default owned by root:root.
        '--chown-recursive', '/home/worker/workspace',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--vcs-checkout=/home/worker/workspace/build/src',
        '--',
        'bash',
        '-c',
        'cd /home/worker && '
        './workspace/build/src/taskcluster/scripts/misc/{}'.format(
            run['script'])
    ]

    add_optimizations(config, run, taskdesc)
def windows_toolchain(config, job, taskdesc):
    """Build a toolchain on the Windows generic-worker.

    Checks out mozilla-unified with robustcheckout, runs the toolchain
    script under msys bash, records toolchain attributes, and (unless
    taskgraph.fast) adds a digest-based cache optimization.
    """
    run = job['run']

    worker = taskdesc['worker']

    worker['artifacts'] = [{
        'path': r'public\build',
        'type': 'directory',
    }]
    worker['chain-of-trust'] = True

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    # Assemble the robustcheckout invocation; %GECKO_*% are expanded by
    # cmd.exe from the task environment at run time.
    hg = r'c:\Program Files\Mercurial\hg.exe'
    hg_command = ['"{}"'.format(hg)]
    hg_command.append('robustcheckout')
    hg_command.extend(['--sharebase', 'y:\\hg-shared'])
    hg_command.append('--purge')
    hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
    hg_command.extend(['--revision', '%GECKO_HEAD_REV%'])
    hg_command.append('%GECKO_HEAD_REPOSITORY%')
    hg_command.append('.\\build\\src')

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        raise NotImplementedError("Python scripts don't work on Windows")

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    bash = r'c:\mozilla-build\msys\bin\bash'
    worker['command'] = [
        ' '.join(hg_command),
        # do something intelligent.
        r'{} build/src/taskcluster/scripts/misc/{}{}'.format(
            bash, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    # Skip expensive digest computation when generating a fast taskgraph.
    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config,
            taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
def docker_worker_phone_builder(config, job, taskdesc):
    """Set up a docker-worker task that runs a B2G phone build."""
    run = job['run']
    # Index the worker directly (fail fast if absent) and use the single
    # ``worker`` reference consistently; the original mixed
    # ``taskdesc.get('worker')`` with ``taskdesc['worker']`` for the same
    # dict.
    worker = taskdesc['worker']

    worker['artifacts'] = [{
        'name': 'private/build',
        'path': '/home/worker/artifacts/',
        'type': 'directory',
    }, {
        'name': 'public/build',
        'path': '/home/worker/artifacts-public/',
        'type': 'directory',
    }]

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    # Persistent workspace/objdir caches speed up rebuilds; not used on
    # try to avoid cache poisoning from untrusted pushes.
    if config.params['project'] != 'try':
        worker['caches'].append({
            'type': 'persistent',
            'name': 'level-{}-{}-build-{}-{}'.format(
                config.params['level'], config.params['project'],
                taskdesc['attributes']['build_platform'],
                taskdesc['attributes']['build_type'],),
            'mount-point': "/home/worker/workspace",
        })

        worker['caches'].append({
            'type': 'persistent',
            'name': 'level-{}-{}-build-{}-{}-objdir-gecko'.format(
                config.params['level'], config.params['project'],
                taskdesc['attributes']['build_platform'],
                taskdesc['attributes']['build_type'],),
            'mount-point': "/home/worker/objdir-gecko",
        })

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': 'b2g/taskcluster-phone-eng.py',
        # Build date derives from the push timestamp so reruns are stable.
        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
                                        time.gmtime(config.params['pushdate'])),
        'TARGET': run['target'],
    })

    if run['debug']:
        env['B2G_DEBUG'] = '1'

    # tooltool downloads
    worker['relengapi-proxy'] = True
    taskdesc['scopes'].extend([
        'docker-worker:relengapi-proxy:tooltool.download.internal',
        'docker-worker:relengapi-proxy:tooltool.download.public',
    ])

    worker['command'] = [
        "/bin/bash", "-c",
        "checkout-gecko workspace"
        " && cd ./workspace/gecko/taskcluster/scripts/phone-builder"
        " && buildbot_step 'Build' ./build-phone.sh $HOME/workspace",
    ]
def docker_worker_spidermonkey(config, job, taskdesc, schema=sm_run_schema):
    """Run a spidermonkey build on docker-worker.

    Configures workspace and tooltool caches, VCS environment variables,
    and a checkout-sources.sh driven build of the requested variant.
    """
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    # A persistent workspace cache is only used above level 1.
    if int(config.params['level']) > 1:
        worker['caches'].append({
            'type': 'persistent',
            'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
                config.params['level'], config.params['project']),
            'mount-point': "/home/worker/workspace",
        })

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'TOOLS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        # Build date derives from the push timestamp so reruns are stable.
        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
                                        time.gmtime(config.params['pushdate'])),
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # tooltool downloads; note that this script downloads using the API
    # endpoint directly, rather than via relengapi-proxy
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
    env['TOOLTOOL_REV'] = 'master'
    if run.get('tooltool-manifest'):
        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']

    # The packaging variant uses a different entry-point script.
    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"

    worker['command'] = [
        "/bin/bash",
        "-c",
        "cd /home/worker/ "
        "&& ./bin/checkout-sources.sh "
        "&& ./workspace/build/src/taskcluster/scripts/builder/" + script
    ]
def mozharness_on_windows(config, job, taskdesc): run = job['run'] # fail if invalid run options are included invalid = [] for prop in [ 'actions', 'custom-build-variant-cfg', 'tooltool-downloads', 'secrets', 'taskcluster-proxy', 'need-xvfb' ]: if prop in run and run[prop]: invalid.append(prop) if not run.get('keep-artifacts', True): invalid.append('keep-artifacts') if invalid: raise Exception( "Jobs run using mozharness on Windows do not support properties " + ', '.join(invalid)) worker = taskdesc['worker'] worker['artifacts'] = [{ 'path': r'public\build', 'type': 'directory', }] docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc) env = worker['env'] env.update({ 'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S", time.gmtime(config.params['pushdate'])), 'MOZ_SCM_LEVEL': config.params['level'], 'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool', 'TOOLTOOL_REV': 'master', }) mh_command = [r'c:\mozilla-build\python\python.exe'] mh_command.append('\\'.join( [r'.\build\src\testing', run['script'].replace('/', '\\')])) for cfg in run['config']: mh_command.append('--config ' + cfg.replace('/', '\\')) mh_command.append('--branch ' + config.params['project']) mh_command.append(r'--skip-buildbot-actions --work-dir %cd:Z:=z:%\build') for option in run.get('options', []): mh_command.append('--' + option) hg = r'c:\Program Files\Mercurial\hg.exe' worker['command'] = [ r'mkdir .\build\src', r'"{}" share c:\builds\hg-shared\mozilla-central .\build\src'.format( hg), r'"{}" pull -u -R .\build\src --rev %GECKO_HEAD_REV% %GECKO_HEAD_REPOSITORY%' .format(hg), ' '.join(mh_command), ]
def docker_worker_toolchain(config, job, taskdesc):
    """Build a toolchain on docker-worker under run-task.

    Publishes the workspace artifacts directory, sets up VCS checkout and
    a persistent tooltool cache, optionally enables relengapi-proxy for
    tooltool downloads, and records index optimizations for the task.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    worker['artifacts'].append({
        'name': 'public',
        'path': '/home/worker/workspace/artifacts/',
        'type': 'directory',
    })

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
    })

    # tooltool downloads. By default we download using the API endpoint, but
    # the job can optionally request relengapi-proxy (for example when
    # downloading internal tooltool resources). So we define the tooltool
    # cache unconditionally.
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'

    # tooltool downloads
    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below

    if run['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
        # 'internal' downloads additionally need the internal scope.
        if run['tooltool-downloads'] == 'internal':
            taskdesc['scopes'].append(
                'docker-worker:relengapi-proxy:tooltool.download.internal')

    worker['command'] = [
        '/home/worker/bin/run-task',
        '--vcs-checkout=/home/worker/workspace/build/src',
        '--',
        'bash',
        '-c',
        'cd /home/worker && '
        './workspace/build/src/taskcluster/scripts/misc/{}'.format(
            run['script'])
    ]

    add_optimizations(config, run, taskdesc)
def mozharness_on_windows(config, job, taskdesc): run = job['run'] # fail if invalid run options are included invalid = [] for prop in [ 'actions', 'custom-build-variant-cfg', 'tooltool-downloads', 'secrets', 'taskcluster-proxy', 'need-xvfb' ]: if prop in run and run[prop]: invalid.append(prop) if not run.get('keep-artifacts', True): invalid.append('keep-artifacts') if invalid: raise Exception( "Jobs run using mozharness on Windows do not support properties " + ', '.join(invalid)) worker = taskdesc['worker'] worker['artifacts'] = [{ 'path': r'public\build', 'type': 'directory', }] docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc) env = worker['env'] env.update({ 'MOZ_BUILD_DATE': config.params['moz_build_date'], 'MOZ_SCM_LEVEL': config.params['level'], 'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool', 'TOOLTOOL_REV': 'master', }) mh_command = [r'c:\mozilla-build\python\python.exe'] mh_command.append('\\'.join( [r'.\build\src\testing', run['script'].replace('/', '\\')])) for cfg in run['config']: mh_command.append('--config ' + cfg.replace('/', '\\')) mh_command.append('--branch ' + config.params['project']) mh_command.append(r'--skip-buildbot-actions --work-dir %cd:Z:=z:%\build') for option in run.get('options', []): mh_command.append('--' + option) hg_command = ['"c:\\Program Files\\Mercurial\\hg.exe"'] hg_command.append('robustcheckout') hg_command.extend(['--sharebase', 'y:\\hg-shared']) hg_command.append('--purge') hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified']) hg_command.extend(['--revision', env['GECKO_HEAD_REV']]) hg_command.append(env['GECKO_HEAD_REPOSITORY']) hg_command.append('.\\build\\src') worker['command'] = [' '.join(hg_command), ' '.join(mh_command)]
def mozharness_on_windows(config, job, taskdesc):
    """Run a mozharness build script on Windows (robustcheckout checkout)."""
    run = job["run"]

    # Refuse run options this worker implementation does not support.
    unsupported = [
        "actions", "custom-build-variant-cfg", "tooltool-downloads",
        "secrets", "taskcluster-proxy", "need-xvfb",
    ]
    invalid = [prop for prop in unsupported if prop in run and run[prop]]
    if not run.get("keep-artifacts", True):
        invalid.append("keep-artifacts")
    if invalid:
        raise Exception(
            "Jobs run using mozharness on Windows do not support properties "
            + ", ".join(invalid))

    worker = taskdesc["worker"]
    worker["artifacts"] = [{"path": r"public\build", "type": "directory"}]

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker["env"]
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
        "TOOLTOOL_REPO": "https://github.com/mozilla/build-tooltool",
        "TOOLTOOL_REV": "master",
    })

    # Assemble the mozharness invocation.
    mh_command = [r"c:\mozilla-build\python\python.exe"]
    mh_command.append(
        "\\".join([r".\build\src\testing", run["script"].replace("/", "\\")]))
    for cfg in run["config"]:
        mh_command.append("--config " + cfg.replace("/", "\\"))
    mh_command.append("--branch " + config.params["project"])
    mh_command.append(r"--skip-buildbot-actions --work-dir %cd:Z:=z:%\build")
    for option in run.get("options", []):
        mh_command.append("--" + option)

    # Check out sources with robustcheckout before running mozharness.
    hg_command = [
        '"c:\\Program Files\\Mercurial\\hg.exe"',
        "robustcheckout",
        "--sharebase", "y:\\hg-shared",
        "--purge",
        "--upstream", "https://hg.mozilla.org/mozilla-unified",
        "--revision", env["GECKO_HEAD_REV"],
        env["GECKO_HEAD_REPOSITORY"],
        ".\\build\\src",
    ]

    worker["command"] = [" ".join(hg_command), " ".join(mh_command)]
def generic_worker_spidermonkey(config, job, taskdesc):
    """Run a spidermonkey build on the Windows generic-worker.

    Only the default 'build-sm.sh' variant is currently allowed; the
    other variants are recognized but rejected as untested.
    """
    assert job['worker']['os'] == 'windows', 'only supports windows right now'

    run = job['run']

    worker = taskdesc['worker']

    generic_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'SCCACHE_DISABLE': "1",
        'WORK': ".",  # Override the defaults in build scripts
        'SRCDIR': "./src",  # with values suitable for windows generic worker
        'UPLOAD_DIR': "./public/build"
    })

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-package is not a supported configuration")
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-mozjs-crate is not a supported configuration")
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-rust-bindings is not a supported configuration")

    # Check out sources with robustcheckout, then run the build script.
    hg_command = ['"c:\\Program Files\\Mercurial\\hg.exe"']
    hg_command.append('robustcheckout')
    hg_command.extend(['--sharebase', 'y:\\hg-shared'])
    hg_command.append('--purge')
    hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
    hg_command.extend(['--revision', env['GECKO_HEAD_REV']])
    hg_command.append(env['GECKO_HEAD_REPOSITORY'])
    hg_command.append('.\\src')

    command = [
        'c:\\mozilla-build\\msys\\bin\\bash.exe '  # string concat
        '"./src/taskcluster/scripts/builder/%s"' % script
    ]

    worker['command'] = []
    worker['command'].extend([' '.join(hg_command), ' '.join(command)])
def windows_toolchain(config, job, taskdesc):
    """Build a toolchain on the Windows generic-worker (clang-cl flavor).

    Mounts a persistent cache for the LLVM SVN checkout, robustcheckouts
    mozilla-unified, runs the toolchain script under msys bash, and
    records toolchain attributes and index optimizations.
    """
    run = job['run']

    taskdesc['run-on-projects'] = ['trunk', 'try']

    worker = taskdesc['worker']

    worker['artifacts'] = [{
        'path': r'public\build',
        'type': 'directory',
    }]
    worker['chain-of-trust'] = True

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    # We fetch LLVM SVN into this.
    svn_cache = 'level-{}-toolchain-clang-cl-build-svn'.format(
        config.params['level'])
    worker['mounts'] = [{
        'cache-name': svn_cache,
        'directory': r'llvm-sources',
    }]
    taskdesc['scopes'].extend([
        'generic-worker:cache:' + svn_cache,
    ])

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    # Assemble the robustcheckout invocation; %GECKO_*% are expanded by
    # cmd.exe from the task environment at run time.
    hg = r'c:\Program Files\Mercurial\hg.exe'
    hg_command = ['"{}"'.format(hg)]
    hg_command.append('robustcheckout')
    hg_command.extend(['--sharebase', 'y:\\hg-shared'])
    hg_command.append('--purge')
    hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
    hg_command.extend(['--revision', '%GECKO_HEAD_REV%'])
    hg_command.append('%GECKO_HEAD_REPOSITORY%')
    hg_command.append('.\\build\\src')

    bash = r'c:\mozilla-build\msys\bin\bash'
    worker['command'] = [
        ' '.join(hg_command),
        # do something intelligent.
        r'{} -c ./build/src/taskcluster/scripts/misc/{}'.format(
            bash, run['script'])
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']

    add_optimizations(config, run, taskdesc)
def generic_worker_spidermonkey(config, job, taskdesc):
    """Run a spidermonkey build on the Windows generic-worker."""
    assert job['worker']['os'] == 'windows', 'only supports windows right now'

    run = job['run']
    worker = taskdesc['worker']

    generic_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'SCCACHE_DISABLE': "1",
        'WORK': ".",  # Override the defaults in build scripts
        'SRCDIR': "./src",  # with values suitable for windows generic worker
        'UPLOAD_DIR': "./public/build"
    })

    # Only the default variant is tested on this worker; every other
    # recognized variant is refused rather than producing an unvetted build.
    script = "build-sm.sh"
    variant = run['using']
    if variant == 'spidermonkey-package':
        script = "build-sm-package.sh"
        raise Exception("spidermonkey-package is not a supported configuration")
    elif variant == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
        raise Exception("spidermonkey-mozjs-crate is not a supported configuration")
    elif variant == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"
        raise Exception("spidermonkey-rust-bindings is not a supported configuration")

    # Check out sources with robustcheckout, then run the build script.
    hg_command = [
        '"c:\\Program Files\\Mercurial\\hg.exe"',
        'robustcheckout',
        '--sharebase', 'y:\\hg-shared',
        '--purge',
        '--upstream', 'https://hg.mozilla.org/mozilla-unified',
        '--revision', env['GECKO_HEAD_REV'],
        env['GECKO_HEAD_REPOSITORY'],
        '.\\src',
    ]

    build_command = [
        'c:\\mozilla-build\\msys\\bin\\bash.exe '  # string concat
        '"./src/taskcluster/scripts/builder/%s"' % script
    ]

    worker['command'] = [' '.join(hg_command), ' '.join(build_command)]
def mozharness_on_windows(config, job, taskdesc): run = job['run'] # fail if invalid run options are included invalid = [] for prop in ['actions', 'custom-build-variant-cfg', 'tooltool-downloads', 'secrets', 'taskcluster-proxy', 'need-xvfb']: if prop in run and run[prop]: invalid.append(prop) if not run.get('keep-artifacts', True): invalid.append('keep-artifacts') if invalid: raise Exception("Jobs run using mozharness on Windows do not support properties " + ', '.join(invalid)) worker = taskdesc['worker'] worker['artifacts'] = [{ 'path': r'public\build', 'type': 'directory', }] docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc) env = worker['env'] env.update({ 'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S", time.gmtime(config.params['pushdate'])), 'MOZ_SCM_LEVEL': config.params['level'], 'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool', 'TOOLTOOL_REV': 'master', }) mh_command = [r'c:\mozilla-build\python\python.exe'] mh_command.append('\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')])) for cfg in run['config']: mh_command.append('--config ' + cfg.replace('/', '\\')) mh_command.append('--branch ' + config.params['project']) mh_command.append(r'--skip-buildbot-actions --work-dir %cd:Z:=z:%\build') for option in run.get('options', []): mh_command.append('--' + option) hg = r'c:\Program Files\Mercurial\hg.exe' worker['command'] = [ r'mkdir .\build\src', r'"{}" share c:\builds\hg-shared\mozilla-central .\build\src'.format(hg), r'"{}" pull -u -R .\build\src --rev %GECKO_HEAD_REV% %GECKO_HEAD_REPOSITORY%'.format(hg), ' '.join(mh_command), ]
def windows_toolchain(config, job, taskdesc):
    """Build a toolchain on the Windows generic-worker (clang-cl flavor).

    Mounts a persistent cache for the LLVM SVN checkout, robustcheckouts
    mozilla-unified, and runs the toolchain script under msys bash.
    """
    run = job['run']

    worker = taskdesc['worker']

    worker['artifacts'] = [{
        'path': r'public\build',
        'type': 'directory',
    }]

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    # We fetch LLVM SVN into this.
    svn_cache = 'level-{}-toolchain-clang-cl-build-svn'.format(
        config.params['level'])
    worker['mounts'] = [{
        'cache-name': svn_cache,
        'path': r'llvm-sources',
    }]
    taskdesc['scopes'].extend([
        'generic-worker:cache:' + svn_cache,
    ])

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool',
        'TOOLTOOL_REV': 'master',
    })

    # Assemble the robustcheckout invocation; %GECKO_*% are expanded by
    # cmd.exe from the task environment at run time.
    hg = r'c:\Program Files\Mercurial\hg.exe'
    hg_command = ['"{}"'.format(hg)]
    hg_command.append('robustcheckout')
    hg_command.extend(['--sharebase', 'y:\\hg-shared'])
    hg_command.append('--purge')
    hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
    hg_command.extend(['--revision', '%GECKO_HEAD_REV%'])
    hg_command.append('%GECKO_HEAD_REPOSITORY%')
    hg_command.append('.\\build\\src')

    bash = r'c:\mozilla-build\msys\bin\bash'
    worker['command'] = [
        ' '.join(hg_command),
        # do something intelligent.
        r'{} -c ./build/src/taskcluster/scripts/misc/{}'.format(bash, run['script'])
    ]
def docker_worker_mulet_simulator(config, job, taskdesc):
    """Configure a docker-worker task that builds the Mulet simulator."""
    worker = taskdesc.get('worker')

    # assumes the builder image (which contains the gecko checkout command)
    taskdesc['worker']['docker-image'] = {"in-tree": "builder"}
    worker['taskcluster-proxy'] = False

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    # Interpolate push parameters (e.g. {project}) into the configured
    # shell command and run it under bash (-e stop on error, -x trace).
    interpolated = job['run']['shell-command'].format(**config.params)
    worker['command'] = ["/bin/bash", "-exc", {'task-reference': interpolated}]
def docker_worker_make_via_build_mulet_linux_sh(config, job, taskdesc):
    """Run a Mulet Linux build via build-mulet-linux.sh on docker-worker."""
    run = job['run']
    worker = taskdesc.get('worker')

    # assumes the builder image (which contains the gecko checkout command)
    taskdesc['worker']['docker-image'] = {"in-tree": "builder"}
    worker['taskcluster-proxy'] = False

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        # Build date derives from the push timestamp so reruns are stable.
        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
                                        time.gmtime(config.params['pushdate'])),
        'MOZ_SCM_LEVEL': config.params['level'],
    })
    env['MOZCONFIG'] = run['mozconfig']

    # tooltool downloads (not via relengapi proxy)
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
    env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
    env['TOOLTOOL_REV'] = 'master'

    worker['command'] = [
        "/bin/bash", "-c",
        "checkout-gecko workspace"
        " && cd ./workspace/gecko/taskcluster/scripts/builder"
        " && buildbot_step 'Build' ./build-mulet-linux.sh $HOME/workspace",
    ]
def docker_worker_hazard(config, job, taskdesc):
    """Run a GC/rooting hazard analysis job on docker-worker.

    Sets up caches, artifacts, secrets, VCS environment variables, and a
    relengapi-proxied tooltool cache, then runs the job's command.
    """
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_setup_secrets(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        # Build date derives from the push timestamp so reruns are stable.
        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
                                        time.gmtime(config.params['pushdate'])),
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # script parameters
    if run.get('tooltool-manifest'):
        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run['mozconfig']

    # tooltool downloads
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    worker['relengapi-proxy'] = True
    taskdesc['scopes'].extend([
        'docker-worker:relengapi-proxy:tooltool.download.public',
    ])
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
    env['TOOLTOOL_REV'] = 'master'

    worker['command'] = ["/bin/bash", "-c", run['command']]
def docker_worker_toolchain(config, job, taskdesc):
    """Build a toolchain on docker-worker via checkout-sources.sh."""
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    worker['artifacts'].append({
        'name': 'public',
        'path': '/home/worker/workspace/artifacts/',
        'type': 'directory',
    })

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
    })

    # tooltool downloads; note that this downloads using the API endpoint
    # directly, rather than via relengapi-proxy
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
    env['TOOLTOOL_REV'] = 'master'

    command = ' && '.join([
        "cd /home/worker/",
        "./bin/checkout-sources.sh",
        "./workspace/build/src/taskcluster/scripts/misc/" + run['script'],
    ])
    worker['command'] = ["/bin/bash", "-c", command]
def docker_worker_toolchain(config, job, taskdesc):
    """Build a toolchain on docker-worker via checkout-sources.sh."""
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    worker['artifacts'].append({
        'name': 'public',
        'path': '/home/worker/workspace/artifacts/',
        'type': 'directory',
    })

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        # Build date derives from the push timestamp so reruns are stable.
        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
                                        time.gmtime(config.params['pushdate'])),
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
    })

    # tooltool downloads; note that this downloads using the API endpoint
    # directly, rather than via relengapi-proxy
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
    env['TOOLTOOL_REV'] = 'master'

    command = ' && '.join([
        "cd /home/worker/",
        "./bin/checkout-sources.sh",
        "./workspace/build/src/taskcluster/scripts/misc/" + run['script'],
    ])
    worker['command'] = ["/bin/bash", "-c", command]
def windows_toolchain(config, job, taskdesc):
    """Build a toolchain on the Windows generic-worker (share/pull checkout).

    Mounts a persistent cache for the LLVM SVN checkout, shares and pulls
    the source, and runs the toolchain script under msys bash.
    """
    run = job['run']

    worker = taskdesc['worker']

    worker['artifacts'] = [{
        'path': r'public\build',
        'type': 'directory',
    }]

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    # We fetch LLVM SVN into this.
    svn_cache = 'level-{}-toolchain-clang-cl-build-svn'.format(
        config.params['level'])
    worker['mounts'] = [{
        'cache-name': svn_cache,
        'path': r'llvm-sources',
    }]
    taskdesc['scopes'].extend([
        'generic-worker:cache:' + svn_cache,
    ])

    env = worker['env']
    env.update({
        # Build date derives from the push timestamp so reruns are stable.
        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
                                        time.gmtime(config.params['pushdate'])),
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool',
        'TOOLTOOL_REV': 'master',
    })

    hg = r'c:\Program Files\Mercurial\hg.exe'
    bash = r'c:\mozilla-build\msys\bin\bash'
    worker['command'] = [
        r'mkdir .\build\src',
        r'"{}" share c:\builds\hg-shared\mozilla-central .\build\src'.format(hg),
        r'"{}" pull -u -R .\build\src --rev %GECKO_HEAD_REV% %GECKO_HEAD_REPOSITORY%'.format(hg),
        # do something intelligent.
        r'{} -c ./build/src/taskcluster/scripts/misc/{}'.format(bash, run['script'])
    ]
def docker_worker_toolchain(config, job, taskdesc):
    """Build a toolchain on docker-worker under run-task (/builds/worker).

    Publishes public artifacts, sets up VCS checkout and (optionally)
    tooltool, records toolchain attributes, and adds index optimizations.
    """
    run = job['run']
    taskdesc['run-on-projects'] = ['trunk', 'try']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['chain-of-trust'] = True

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
    })

    # Tooltool is only configured when the job requests downloads; the
    # helper handles the internal/public scope distinction.
    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout=/builds/worker/workspace/build/src',
        '--',
        'bash',
        '-c',
        'cd /builds/worker && '
        './workspace/build/src/taskcluster/scripts/misc/{}'.format(
            run['script'])
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    add_optimizations(config, run, taskdesc)
def docker_worker_toolchain(config, job, taskdesc):
    """Build a toolchain on docker-worker via checkout-sources.sh."""
    run = job["run"]
    worker = taskdesc["worker"]
    worker["artifacts"] = []
    worker["caches"] = []

    worker["artifacts"].append({
        "name": "public",
        "path": "/home/worker/workspace/artifacts/",
        "type": "directory",
    })

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker["env"]
    env.update({
        # Build date derives from the push timestamp so reruns are stable.
        "MOZ_BUILD_DATE": time.strftime(
            "%Y%m%d%H%M%S", time.gmtime(config.params["pushdate"])),
        "MOZ_SCM_LEVEL": config.params["level"],
        "TOOLS_DISABLE": "true",
    })

    # tooltool downloads; note that this downloads using the API endpoint
    # directly, rather than via relengapi-proxy
    worker["caches"].append({
        "type": "persistent",
        "name": "tooltool-cache",
        "mount-point": "/home/worker/tooltool-cache",
    })
    env["TOOLTOOL_CACHE"] = "/home/worker/tooltool-cache"
    env["TOOLTOOL_REPO"] = "https://github.com/mozilla/build-tooltool"
    env["TOOLTOOL_REV"] = "master"

    # Chain the checkout and the toolchain script into one bash command.
    steps = [
        "cd /home/worker/",
        "./bin/checkout-sources.sh",
        "./workspace/build/src/taskcluster/scripts/misc/" + run["script"],
    ]
    worker["command"] = ["/bin/bash", "-c", " && ".join(steps)]
def mozharness_on_generic_worker(config, job, taskdesc):
    """Configure a Windows mozharness build to run via generic-worker.

    Validates the run options, sets up artifacts and environment
    variables, then assembles the command list: optional sccache symlink
    setup, hg robustcheckout of gecko (and of comm when requested), and
    finally the mozharness invocation.

    Raises Exception when unsupported run properties are set or the build
    platform is not Windows.
    """
    assert job['worker']['os'] == 'windows', 'only supports windows right now'

    run = job['run']

    # fail if invalid run options are included
    invalid = []
    for prop in ['tooltool-downloads',
                 'secrets', 'taskcluster-proxy', 'need-xvfb']:
        if prop in run and run[prop]:
            invalid.append(prop)
    if not run.get('keep-artifacts', True):
        invalid.append('keep-artifacts')
    if invalid:
        raise Exception("Jobs run using mozharness on Windows do not support properties " +
                        ', '.join(invalid))

    worker = taskdesc['worker']

    generic_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })
    if run['use-simple-package']:
        env.update({'MOZ_SIMPLE_PACKAGE_NAME': 'target'})
    if 'extra-config' in run:
        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run['extra-config'])

    # The windows generic worker uses batch files to pass environment
    # variables to commands.  Setting a variable to empty in a batch file
    # unsets it, so when there is no try commit message we set a non-empty
    # placeholder value in `TRY_COMMIT_MSG` so that mozharness doesn't try
    # to find the commit message on its own.
    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message'] or 'no commit message'

    if not job['attributes']['build_platform'].startswith('win'):
        raise Exception(
            "Task generation for mozharness build jobs currently only supported on Windows"
        )

    mh_command = [r'c:\mozilla-build\python\python.exe']
    mh_command.append('\\'.join([r'.\build\src\testing',
                                 run['script'].replace('/', '\\')]))
    if 'config-paths' in run:
        for path in run['config-paths']:
            mh_command.append(r'--extra-config-path '
                              r'.\build\src\{}'.format(path.replace('/', '\\')))
    for cfg in run['config']:
        mh_command.append('--config ' + cfg.replace('/', '\\'))
    if run['use-magic-mh-args']:
        mh_command.append('--branch ' + config.params['project'])
    mh_command.append(r'--skip-buildbot-actions')
    mh_command.append(r'--work-dir %cd:Z:=z:%\build')
    for action in run.get('actions', []):
        assert ' ' not in action
        mh_command.append('--' + action)

    for option in run.get('options', []):
        assert ' ' not in option
        mh_command.append('--' + option)
    if run.get('custom-build-variant-cfg'):
        mh_command.append('--custom-build-variant')
        mh_command.append(run['custom-build-variant-cfg'])

    def checkout_repo(base_repo, head_repo, head_rev, path):
        # Return a robustcheckout command for the repo, plus a
        # TinderboxPrint line that links the built revision in the log.
        hg_command = ['"c:\\Program Files\\Mercurial\\hg.exe"']
        hg_command.append('robustcheckout')
        hg_command.extend(['--sharebase', 'y:\\hg-shared'])
        hg_command.append('--purge')
        hg_command.extend(['--upstream', base_repo])
        hg_command.extend(['--revision', head_rev])
        hg_command.append(head_repo)
        hg_command.append(path)

        # This must be a text (str) literal, not bytes: bytes objects have
        # no .format() on Python 3, and the result is joined with the other
        # str command elements.
        logging_command = [
            ":: TinderboxPrint:<a href={source_repo}/rev/{revision} "
            "title='Built from {repo_name} revision {revision}'>{revision}</a>\n".format(
                revision=head_rev,
                source_repo=head_repo,
                repo_name=head_repo.split('/')[-1],
            )]

        return [
            ' '.join(hg_command),
            ' '.join(logging_command),
        ]

    hg_commands = checkout_repo(
        base_repo=env['GECKO_BASE_REPOSITORY'],
        head_repo=env['GECKO_HEAD_REPOSITORY'],
        head_rev=env['GECKO_HEAD_REV'],
        path='.\\build\\src')
    if run['comm-checkout']:
        hg_commands.extend(
            checkout_repo(
                base_repo=env['COMM_BASE_REPOSITORY'],
                head_repo=env['COMM_HEAD_REPOSITORY'],
                head_rev=env['COMM_HEAD_REV'],
                path='.\\build\\src\\comm')
        )

    worker['command'] = []
    if taskdesc.get('needs-sccache'):
        worker['command'].extend([
            # Make the comment part of the first command, as it will help users to
            # understand what is going on, and why these steps are implemented.
            dedent('''\
            :: sccache currently uses the full compiler commandline as input to the
            :: cache hash key, so create a symlink to the task dir and build from
            :: the symlink dir to get consistent paths.
            if exist z:\\build rmdir z:\\build'''),
            r'mklink /d z:\build %cd%',
            # Grant delete permission on the link to everyone.
            r'icacls z:\build /grant *S-1-1-0:D /L',
            r'cd /d z:\build',
        ])

    worker['command'].extend(hg_commands)
    worker['command'].extend([
        ' '.join(mh_command)
    ])
def docker_worker_toolchain(config, job, taskdesc):
    """Set up a docker-worker toolchain build task.

    Ensures a public/build artifact, a sparse gecko checkout, tooltool
    access when requested, and (unless taskgraph.fast) registers a
    cached-task optimization keyed on a digest of the task's inputs.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['chain-of-trust'] = True

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    existing = worker.setdefault('artifacts', [])
    has_public_build = any(a.get('name') == 'public/build' for a in existing)
    if not has_public_build:
        docker_worker_add_public_artifacts(config, job, taskdesc)

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc, sparse=True)

    worker['env'].update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
    })

    downloads = run['tooltool-downloads']
    if downloads:
        docker_worker_add_tooltool(config, job, taskdesc,
                                   internal=(downloads == 'internal'))

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    wrapper = ('workspace/build/src/mach python '
               if run['script'].endswith('.py') else '')

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    sparse_profile = []
    if run.get('sparse-profile'):
        sparse_profile = [
            '--sparse-profile',
            'build/sparse-profiles/{}'.format(run['sparse-profile']),
        ]

    command = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout=/builds/worker/workspace/build/src',
    ]
    command += sparse_profile
    command += [
        '--',
        'bash',
        '-c',
        'cd /builds/worker && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            wrapper, run['script'], args),
    ]
    worker['command'] = command

    attrs = taskdesc.setdefault('attributes', {})
    attrs['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attrs['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        # Strip the kind prefix off the label to form the cache name.
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config, taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
def mozharness_on_generic_worker(config, job, taskdesc):
    """Configure a Windows mozharness build to run via generic-worker.

    Checks out gecko from mozilla-unified with robustcheckout and then
    invokes the mozharness script, with optional sccache symlink setup.
    """
    assert job['worker']['os'] == 'windows', 'only supports windows right now'

    run = job['run']

    # fail if invalid run options are included
    unsupported = [prop for prop in ('tooltool-downloads', 'secrets',
                                     'taskcluster-proxy', 'need-xvfb')
                   if prop in run and run[prop]]
    if not run.get('keep-artifacts', True):
        unsupported.append('keep-artifacts')
    if unsupported:
        raise Exception(
            "Jobs run using mozharness on Windows do not support properties " +
            ', '.join(unsupported))

    worker = taskdesc['worker']

    generic_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })
    if run['use-simple-package']:
        env['MOZ_SIMPLE_PACKAGE_NAME'] = 'target'

    if not job['attributes']['build_platform'].startswith('win'):
        raise Exception(
            "Task generation for mozharness build jobs currently only supported on Windows"
        )

    mh_command = [
        r'c:\mozilla-build\python\python.exe',
        '\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')]),
    ]
    mh_command += ['--config ' + cfg.replace('/', '\\')
                   for cfg in run['config']]
    if run['use-magic-mh-args']:
        mh_command.append('--branch ' + config.params['project'])
    mh_command.append(r'--skip-buildbot-actions')
    mh_command.append(r'--work-dir %cd:Z:=z:%\build')
    for action in run.get('actions', []):
        assert ' ' not in action
        mh_command.append('--' + action)
    for option in run.get('options', []):
        assert ' ' not in option
        mh_command.append('--' + option)
    if run.get('custom-build-variant-cfg'):
        mh_command += ['--custom-build-variant',
                       run['custom-build-variant-cfg']]

    hg_command = [
        '"c:\\Program Files\\Mercurial\\hg.exe"',
        'robustcheckout',
        '--sharebase', 'y:\\hg-shared',
        '--purge',
        '--upstream', 'https://hg.mozilla.org/mozilla-unified',
        '--revision', env['GECKO_HEAD_REV'],
        env['GECKO_HEAD_REPOSITORY'],
        '.\\build\\src',
    ]

    commands = []
    if taskdesc.get('needs-sccache'):
        commands += [
            # Make the comment part of the first command, as it will help users to
            # understand what is going on, and why these steps are implemented.
            dedent('''\
            :: sccache currently uses the full compiler commandline as input to the
            :: cache hash key, so create a symlink to the task dir and build from
            :: the symlink dir to get consistent paths.
            if exist z:\\build rmdir z:\\build'''),
            r'mklink /d z:\build %cd%',
            # Grant delete permission on the link to everyone.
            r'icacls z:\build /grant *S-1-1-0:D /L',
            r'cd /d z:\build',
        ]
    commands += [' '.join(hg_command), ' '.join(mh_command)]
    worker['command'] = commands
def mozharness_on_generic_worker(config, job, taskdesc):
    """Configure a Windows mozharness build to run via generic-worker.

    Early variant: always passes --branch, packs --skip-buildbot-actions
    and --work-dir into a single argument string, and does not support
    mozharness actions or custom build variants.
    """
    run = job['run']

    # fail if invalid run options are included
    unsupported = [prop for prop in ('actions', 'custom-build-variant-cfg',
                                     'tooltool-downloads', 'secrets',
                                     'taskcluster-proxy', 'need-xvfb')
                   if prop in run and run[prop]]
    if not run.get('keep-artifacts', True):
        unsupported.append('keep-artifacts')
    if unsupported:
        raise Exception(
            "Jobs run using mozharness on Windows do not support properties " +
            ', '.join(unsupported))

    worker = taskdesc['worker']
    worker['artifacts'] = [{
        'path': r'public/build',
        'type': 'directory',
    }]

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_SIMPLE_PACKAGE_NAME': 'target',
    })

    if not job['attributes']['build_platform'].startswith('win'):
        raise Exception(
            "Task generation for mozharness build jobs currently only supported on Windows"
        )

    mh_command = [
        r'c:\mozilla-build\python\python.exe',
        '\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')]),
    ]
    for cfg in run['config']:
        mh_command.append('--config ' + cfg.replace('/', '\\'))
    mh_command.append('--branch ' + config.params['project'])
    mh_command.append(r'--skip-buildbot-actions --work-dir %cd:Z:=z:%\build')
    mh_command.extend('--' + option for option in run.get('options', []))

    hg_command = [
        '"c:\\Program Files\\Mercurial\\hg.exe"',
        'robustcheckout',
        '--sharebase', 'y:\\hg-shared',
        '--purge',
        '--upstream', 'https://hg.mozilla.org/mozilla-unified',
        '--revision', env['GECKO_HEAD_REV'],
        env['GECKO_HEAD_REPOSITORY'],
        '.\\build\\src',
    ]

    commands = []
    if taskdesc.get('needs-sccache'):
        commands.extend([
            # Make the comment part of the first command, as it will help users to
            # understand what is going on, and why these steps are implemented.
            dedent('''\
            :: sccache currently uses the full compiler commandline as input to the
            :: cache hash key, so create a symlink to the task dir and build from
            :: the symlink dir to get consistent paths.
            if exist z:\\build rmdir z:\\build'''),
            r'mklink /d z:\build %cd%',
            # Grant delete permission on the link to everyone.
            r'icacls z:\build /grant *S-1-1-0:D /L',
            r'cd /d z:\build',
        ])
    commands.append(' '.join(hg_command))
    commands.append(' '.join(mh_command))
    worker['command'] = commands
def mozharness_on_generic_worker(config, job, taskdesc):
    """Configure a Windows mozharness build to run via generic-worker.

    Validates the run options, sets up artifacts and environment
    variables, then assembles the command list: optional sccache symlink
    setup, hg robustcheckout of gecko (and of comm when requested), and
    finally the mozharness invocation.

    Raises Exception when unsupported run properties are set or the build
    platform is not Windows.
    """
    assert job['worker']['os'] == 'windows', 'only supports windows right now'

    run = job['run']

    # fail if invalid run options are included
    invalid = []
    for prop in [
            'tooltool-downloads', 'secrets', 'taskcluster-proxy',
            'need-xvfb']:
        if prop in run and run[prop]:
            invalid.append(prop)
    if not run.get('keep-artifacts', True):
        invalid.append('keep-artifacts')
    if invalid:
        raise Exception(
            "Jobs run using mozharness on Windows do not support properties " +
            ', '.join(invalid))

    worker = taskdesc['worker']

    generic_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })
    if run['use-simple-package']:
        env.update({'MOZ_SIMPLE_PACKAGE_NAME': 'target'})

    # The windows generic worker uses batch files to pass environment
    # variables to commands.  Setting a variable to empty in a batch file
    # unsets it, so when there is no try commit message we set a non-empty
    # placeholder value in `TRY_COMMIT_MSG` so that mozharness doesn't try
    # to find the commit message on its own.
    if 'try' in config.params['project']:
        env['TRY_COMMIT_MSG'] = config.params['message'] or 'no commit message'

    if not job['attributes']['build_platform'].startswith('win'):
        raise Exception(
            "Task generation for mozharness build jobs currently only supported on Windows"
        )

    mh_command = [r'c:\mozilla-build\python\python.exe']
    mh_command.append('\\'.join(
        [r'.\build\src\testing', run['script'].replace('/', '\\')]))
    if 'config-paths' in run:
        for path in run['config-paths']:
            mh_command.append(r'--extra-config-path '
                              r'.\build\src\{}'.format(path.replace('/', '\\')))
    for cfg in run['config']:
        mh_command.append('--config ' + cfg.replace('/', '\\'))
    if run['use-magic-mh-args']:
        mh_command.append('--branch ' + config.params['project'])
    mh_command.append(r'--skip-buildbot-actions')
    mh_command.append(r'--work-dir %cd:Z:=z:%\build')
    for action in run.get('actions', []):
        assert ' ' not in action
        mh_command.append('--' + action)

    for option in run.get('options', []):
        assert ' ' not in option
        mh_command.append('--' + option)
    if run.get('custom-build-variant-cfg'):
        mh_command.append('--custom-build-variant')
        mh_command.append(run['custom-build-variant-cfg'])

    def checkout_repo(base_repo, head_repo, head_rev, path):
        # Return a robustcheckout command for the repo, plus a
        # TinderboxPrint line that links the built revision in the log.
        hg_command = ['"c:\\Program Files\\Mercurial\\hg.exe"']
        hg_command.append('robustcheckout')
        hg_command.extend(['--sharebase', 'y:\\hg-shared'])
        hg_command.append('--purge')
        hg_command.extend(['--upstream', base_repo])
        hg_command.extend(['--revision', head_rev])
        hg_command.append(head_repo)
        hg_command.append(path)

        # This must be a text (str) literal, not bytes: bytes objects have
        # no .format() on Python 3, and the result is joined with the other
        # str command elements.
        logging_command = [
            ":: TinderboxPrint:<a href={source_repo}/rev/{revision} "
            "title='Built from {repo_name} revision {revision}'>{revision}</a>\n"
            .format(
                revision=head_rev,
                source_repo=head_repo,
                repo_name=head_repo.split('/')[-1],
            )
        ]

        return [
            ' '.join(hg_command),
            ' '.join(logging_command),
        ]

    hg_commands = checkout_repo(base_repo=env['GECKO_BASE_REPOSITORY'],
                                head_repo=env['GECKO_HEAD_REPOSITORY'],
                                head_rev=env['GECKO_HEAD_REV'],
                                path='.\\build\\src')
    if run['comm-checkout']:
        hg_commands.extend(
            checkout_repo(base_repo=env['COMM_BASE_REPOSITORY'],
                          head_repo=env['COMM_HEAD_REPOSITORY'],
                          head_rev=env['COMM_HEAD_REV'],
                          path='.\\build\\src\\comm'))

    worker['command'] = []
    if taskdesc.get('needs-sccache'):
        worker['command'].extend([
            # Make the comment part of the first command, as it will help users to
            # understand what is going on, and why these steps are implemented.
            dedent('''\
            :: sccache currently uses the full compiler commandline as input to the
            :: cache hash key, so create a symlink to the task dir and build from
            :: the symlink dir to get consistent paths.
            if exist z:\\build rmdir z:\\build'''),
            r'mklink /d z:\build %cd%',
            # Grant delete permission on the link to everyone.
            r'icacls z:\build /grant *S-1-1-0:D /L',
            r'cd /d z:\build',
        ])

    worker['command'].extend(hg_commands)
    worker['command'].extend([' '.join(mh_command)])
def docker_worker_phone_builder(config, job, taskdesc):
    """Set up a docker-worker task that runs the B2G phone-builder script.

    Declares private/public build artifacts, persistent workspace/objdir
    caches (except on try), the mozharness environment, and the tooltool
    relengapi-proxy scopes.
    """
    run = job['run']

    # Use indexing rather than .get(): a task description with no 'worker'
    # should fail fast with a KeyError here, not with an opaque
    # AttributeError on None at the next line.  Use `worker` consistently
    # below instead of re-looking-up taskdesc['worker'].
    worker = taskdesc['worker']
    worker['artifacts'] = [{
        'name': 'private/build',
        'path': '/home/worker/artifacts/',
        'type': 'directory',
    }, {
        'name': 'public/build',
        'path': '/home/worker/artifacts-public/',
        'type': 'directory',
    }]

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    # try pushes do not get persistent caches
    if config.params['project'] != 'try':
        worker['caches'].append({
            'type': 'persistent',
            'name': 'level-{}-{}-build-{}-{}'.format(
                config.params['level'],
                config.params['project'],
                taskdesc['attributes']['build_platform'],
                taskdesc['attributes']['build_type'],
            ),
            'mount-point': "/home/worker/workspace",
        })
        worker['caches'].append({
            'type': 'persistent',
            'name': 'level-{}-{}-build-{}-{}-objdir-gecko'.format(
                config.params['level'],
                config.params['project'],
                taskdesc['attributes']['build_platform'],
                taskdesc['attributes']['build_type'],
            ),
            'mount-point': "/home/worker/objdir-gecko",
        })

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': 'b2g/taskcluster-phone-eng.py',
        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
                                        time.gmtime(config.params['pushdate'])),
        'TARGET': run['target'],
    })

    if run['debug']:
        env['B2G_DEBUG'] = '1'

    # tooltool downloads
    worker['relengapi-proxy'] = True
    taskdesc['scopes'].extend([
        'docker-worker:relengapi-proxy:tooltool.download.internal',
        'docker-worker:relengapi-proxy:tooltool.download.public',
    ])

    worker['command'] = [
        "/bin/bash",
        "-c",
        "checkout-gecko workspace"
        " && cd ./workspace/gecko/taskcluster/scripts/phone-builder"
        " && buildbot_step 'Build' ./build-phone.sh $HOME/workspace",
    ]
def docker_worker_toolchain(config, job, taskdesc):
    """Set up a docker-worker toolchain build task.

    Ensures a public/build artifact, a sparse gecko checkout, tooltool
    access when requested, and (unless taskgraph.fast) registers a
    cached-task optimization keyed on a digest of the task's inputs.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['chain-of-trust'] = True

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(a.get('name') == 'public/build' for a in artifacts):
        docker_worker_add_public_artifacts(config, job, taskdesc)

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc, sparse=True)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
    })

    tooltool = run['tooltool-downloads']
    if tooltool:
        docker_worker_add_tooltool(config, job, taskdesc,
                                   internal=(tooltool == 'internal'))

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    script = run['script']
    if script.endswith('.py'):
        wrapper = 'workspace/build/src/mach python '
    else:
        wrapper = ''

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    if run.get('sparse-profile'):
        sparse_args = [
            '--sparse-profile',
            'build/sparse-profiles/{}'.format(run['sparse-profile']),
        ]
    else:
        sparse_args = []

    script_cmd = ('cd /builds/worker && '
                  '{}workspace/build/src/taskcluster/scripts/misc/{}{}'
                  .format(wrapper, script, args))
    worker['command'] = (
        ['/builds/worker/bin/run-task',
         '--vcs-checkout=/builds/worker/workspace/build/src'] +
        sparse_args +
        ['--', 'bash', '-c', script_cmd]
    )

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        # Strip the kind prefix off the label to form the cache name.
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config, taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
def mozharness_on_windows(config, job, taskdesc):
    """Set up a Windows build task that runs mozharness.

    Checks out gecko from mozilla-unified with robustcheckout and then
    invokes the mozharness script, with optional sccache symlink setup.
    """
    run = job['run']

    # fail if invalid run options are included
    unsupported = [prop for prop in ('actions', 'custom-build-variant-cfg',
                                     'tooltool-downloads', 'secrets',
                                     'taskcluster-proxy', 'need-xvfb')
                   if prop in run and run[prop]]
    if not run.get('keep-artifacts', True):
        unsupported.append('keep-artifacts')
    if unsupported:
        raise Exception(
            "Jobs run using mozharness on Windows do not support properties " +
            ', '.join(unsupported))

    worker = taskdesc['worker']
    worker['artifacts'] = [{
        'path': r'public\build',
        'type': 'directory',
    }]

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    mh_command = [
        r'c:\mozilla-build\python\python.exe',
        '\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')]),
    ]
    mh_command += ['--config ' + cfg.replace('/', '\\')
                   for cfg in run['config']]
    mh_command.append('--branch ' + config.params['project'])
    mh_command.append(r'--skip-buildbot-actions --work-dir %cd:Z:=z:%\build')
    mh_command += ['--' + option for option in run.get('options', [])]

    hg_command = [
        '"c:\\Program Files\\Mercurial\\hg.exe"',
        'robustcheckout',
        '--sharebase', 'y:\\hg-shared',
        '--purge',
        '--upstream', 'https://hg.mozilla.org/mozilla-unified',
        '--revision', env['GECKO_HEAD_REV'],
        env['GECKO_HEAD_REPOSITORY'],
        '.\\build\\src',
    ]

    commands = []
    # sccache currently uses the full compiler commandline as input to the
    # cache hash key, so create a symlink to the task dir and build from
    # the symlink dir to get consistent paths.
    if taskdesc.get('needs-sccache'):
        commands += [
            r'if exist z:\build rmdir z:\build',
            r'mklink /d z:\build %cd%',
            # Grant delete permission on the link to everyone.
            r'icacls z:\build /grant *S-1-1-0:D /L',
            r'cd /d z:\build',
        ]
    commands += [' '.join(hg_command), ' '.join(mh_command)]
    worker['command'] = commands
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Set up a docker-worker task that builds via mozharness' build.sh.

    Configures the desktop-build image, caches, vcs environment, the
    MOZHARNESS_* environment, optional xvfb/tooltool support, and secrets.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    # running via mozharness assumes desktop-build (which contains build.sh)
    taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}

    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': time.strftime(
            "%Y%m%d%H%M%S", time.gmtime(config.params['pushdate'])),
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])
    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])
    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory.  This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    # tooltool downloads
    downloads = run['tooltool-downloads']
    if downloads:
        worker['relengapi-proxy'] = True
        worker['caches'].append({
            'type': 'persistent',
            'name': 'tooltool-cache',
            'mount-point': '/home/worker/tooltool-cache',
        })
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
        if downloads == 'internal':
            taskdesc['scopes'].append(
                'docker-worker:relengapi-proxy:tooltool.download.internal')
        env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
        env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
        env['TOOLTOOL_REV'] = 'master'

    docker_worker_setup_secrets(config, job, taskdesc)

    worker['command'] = ["/bin/bash", "bin/build.sh"]