def docker_worker_hazard(config, job, taskdesc):
    """Set up a docker-worker task for the static-analysis (hazard) build.

    Configures public artifacts, the workspace cache, tooltool, secrets and
    a gecko checkout, then assembles the run-task command line that executes
    run['command'] under bash.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_add_tooltool(config, job, taskdesc)
    docker_worker_setup_secrets(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    # Use setdefault for consistency with the other worker-setup functions in
    # this file, and to avoid a KeyError when the task description does not
    # already carry an 'env' dict.
    env = worker.setdefault('env', {})
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # script parameters
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run['mozconfig']

    # build-haz-linux.sh needs this otherwise it assumes the checkout is in
    # the workspace.
    env['GECKO_DIR'] = '/builds/worker/checkouts/gecko'

    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout', '/builds/worker/checkouts/gecko',
        '--',
        '/bin/bash', '-c', run['command']
    ]
def docker_worker_spidermonkey(config, job, taskdesc):
    """Configure a docker-worker task that runs one of the spidermonkey
    build flavours (plain, package, mozjs-crate, rust-bindings)."""
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []

    workspace_cache = {
        'type': 'persistent',
        'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
            config.params['level'], config.params['project']),
        'mount-point': "/builds/worker/workspace",
        'skip-untrusted': True,
    }
    worker.setdefault('caches', []).append(workspace_cache)

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_tooltool(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    support_vcs_checkout(config, job, taskdesc)

    # Map the run-using flavour to its in-tree builder script; any other
    # flavour falls back to the plain build-sm.sh.
    script = {
        'spidermonkey-package': "build-sm-package.sh",
        'spidermonkey-mozjs-crate': "build-sm-mozjs-crate.sh",
        'spidermonkey-rust-bindings': "build-sm-rust-bindings.sh",
    }.get(run['using'], "build-sm.sh")

    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout', '/builds/worker/workspace/build/src',
        '--',
        '/bin/bash',
        '-c',
        'cd /builds/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
    ]
def mozharness_test_on_docker(config, job, taskdesc):
    """Configure a mozharness-driven test task for docker-worker.

    Fills in the worker section of *taskdesc* (docker image, artifacts,
    caches, env) and assembles the run-task command line that drives
    test-linux.sh inside the image.
    """
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker']

    # apply some defaults
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "/builds/worker/workspace/build/upload/logs/"),
        ("public/test", "/builds/worker/artifacts/"),
        ("public/test_info/", "/builds/worker/workspace/build/blobber_upload_dir/"),
    ]

    installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
    mozharness_url = get_artifact_url('<build>', 'public/build/mozharness.zip')

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('/builds/worker/workspace', path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    worker['caches'] = [{
        'type': 'persistent',
        'name': 'level-{}-{}-test-workspace'.format(
            config.params['level'], config.params['project']),
        'mount-point': "/builds/worker/workspace",
    }]

    env = worker['env'] = {
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
        'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
        'MOZ_AUTOMATION': '1',
    }

    if mozharness.get('mochitest-flavor'):
        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options
    if mozharness['tooltool-downloads']:
        docker_worker_add_tooltool(config, job, taskdesc, internal=True)

    if test['reboot']:
        # BUGFIX: message previously said "generic-worker" even though this is
        # the docker-worker implementation (copy-paste from the GW variant).
        raise Exception('reboot: {} not supported on docker-worker'.format(test['reboot']))

    # assemble the command line
    command = [
        '/builds/worker/bin/run-task',
    ]

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        command.extend(['--vcs-checkout', '/builds/worker/checkouts/gecko'])
        env['MOZHARNESS_PATH'] = '/builds/worker/checkouts/gecko/testing/mozharness'
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    command.extend([
        '--',
        '/builds/worker/bin/test-linux.sh',
    ])

    if mozharness.get('no-read-buildbot-config'):
        command.append("--no-read-buildbot-config")
    command.extend([
        {"task-reference": "--installer-url=" + installer_url},
        {"task-reference": "--test-packages-url=" + test_packages_url(taskdesc)},
    ])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(command):
                # Python 2: basestring covers str/unicode; dict entries
                # ({'task-reference': ...}) are deliberately skipped.
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        # may be a bool or already a string; normalize bools to 'true'/'false'
        download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    worker['command'] = command
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Set up a mozharness build task on docker-worker.

    Configures the docker image, artifacts, workspace cache, gecko checkout
    and mozharness environment variables, then assembles the run-task
    command line.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    if not run['use-simple-package']:
        # BUGFIX: implicit string concatenation was missing a space and
        # rendered as "...disabled via'use-simple-package'...".
        raise NotImplementedError("Simple packaging cannot be disabled via "
                                  "'use-simple-package' on docker-workers")
    if not run['use-magic-mh-args']:
        # BUGFIX: grammar ("Cannot disabled") and missing space fixed.
        raise NotImplementedError("Cannot disable mh magic arg passing via "
                                  "'use-magic-mh-args' on docker-workers")

    # Running via mozharness assumes an image that contains build.sh:
    # by default, debian7-amd64-build, but it could be another image (like
    # android-build).
    taskdesc['worker'].setdefault('docker-image', {'in-tree': 'debian7-amd64-build'})

    taskdesc['worker'].setdefault('artifacts', []).append({
        'name': 'public/logs',
        'path': '{workdir}/logs/'.format(**run),
        'type': 'directory'
    })
    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')

    docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc,
                                      extra=run.get('extra-workspace-cache-key'))
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'GECKO_PATH': '{workdir}/workspace/build/src'.format(**run),
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        # GECKO_HEAD_REV is presumably placed in env by support_vcs_checkout
        # above -- TODO confirm against that helper.
        'MOZ_SOURCE_CHANGESET': env['GECKO_HEAD_REV'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
        'PYTHONUNBUFFERED': '1',
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'config-paths' in run:
        env['MOZHARNESS_CONFIG_PATHS'] = ' '.join(run['config-paths'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'extra-config' in run:
        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run['extra-config'])

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    if run['comm-checkout']:
        env['MOZ_SOURCE_CHANGESET'] = env['COMM_HEAD_REV']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = [4]

    setup_secrets(config, job, taskdesc)

    command = [
        '{workdir}/bin/run-task'.format(**run),
        '--gecko-checkout', env['GECKO_PATH'],
    ]
    if run['comm-checkout']:
        command.append(
            '--comm-checkout={workdir}/workspace/build/src/comm'.format(**run))

    command += [
        '--',
        '{workdir}/workspace/build/src/{script}'.format(
            workdir=run['workdir'],
            script=run.get('job-script', 'taskcluster/scripts/builder/build-linux.sh'),
        ),
    ]

    worker['command'] = command
def mozharness_test_on_docker(config, job, taskdesc):
    """Configure a mozharness-driven test task for docker-worker.

    Parameterized on run['workdir']; fills in the worker section of
    *taskdesc* (image, artifacts, caches, env) and assembles the run-task
    command line that drives test-linux.sh inside the image.
    """
    run = job['run']
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker']

    # apply some defaults
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']
    if 'android-em-7.0-x86' in test['test-platform']:
        worker['privileged'] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "{workdir}/workspace/build/upload/logs/".format(**run)),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        ("public/test_info/",
         "{workdir}/workspace/build/blobber_upload_dir/".format(**run)),
    ]

    installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
    mozharness_url = get_artifact_url(
        '<build>', get_artifact_path(taskdesc, 'mozharness.zip'))

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('{workdir}/workspace'.format(**run), path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    worker['caches'] = [{
        'type': 'persistent',
        'name': 'level-{}-{}-test-workspace'.format(config.params['level'],
                                                    config.params['project']),
        'mount-point': "{workdir}/workspace".format(**run),
    }]

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
        'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
        'MOZ_AUTOMATION': '1',
    })

    if mozharness.get('mochitest-flavor'):
        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options
    if mozharness['tooltool-downloads']:
        docker_worker_add_tooltool(config, job, taskdesc, internal=True)

    if test['reboot']:
        # BUGFIX: message previously said "generic-worker" even though this is
        # the docker-worker implementation (copy-paste from the GW variant).
        raise Exception('reboot: {} not supported on docker-worker'.format(
            test['reboot']))

    # assemble the command line
    command = [
        '{workdir}/bin/run-task'.format(**run),
    ]

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        command.extend(
            ['--vcs-checkout', '{workdir}/checkouts/gecko'.format(**run)])
        env['MOZHARNESS_PATH'] = '{workdir}/checkouts/gecko/testing/mozharness'.format(
            **run)
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    command.extend([
        '--',
        '{workdir}/bin/test-linux.sh'.format(**run),
    ])

    command.extend([
        {"task-reference": "--installer-url=" + installer_url},
        {"task-reference": "--test-packages-url=" + test_packages_url(taskdesc)},
    ])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace(
                '<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(command):
                # Python 2: basestring covers str/unicode; dict entries
                # ({'task-reference': ...}) are deliberately skipped.
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        # may be a bool or already a string; normalize bools to 'true'/'false'
        download_symbols = {True: 'true', False: 'false'}.get(
            download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    worker['command'] = command
def docker_worker_toolchain(config, job, taskdesc):
    """Configure a toolchain build task on docker-worker.

    Enables chain-of-trust, sets up artifacts and a sparse gecko checkout,
    builds the run-task command line, records toolchain attributes and
    registers the task's optimization.
    """
    run = job['run']
    taskdesc['run-on-projects'] = ['trunk', 'try']

    worker = taskdesc['worker']
    worker['chain-of-trust'] = True

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    existing_artifacts = worker.setdefault('artifacts', [])
    if not any(a.get('name') == 'public/build' for a in existing_artifacts):
        docker_worker_add_public_artifacts(config, job, taskdesc)

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc, sparse=True)

    worker['env'].update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
    })

    if run['tooltool-downloads']:
        docker_worker_add_tooltool(
            config, job, taskdesc,
            internal=(run['tooltool-downloads'] == 'internal'))

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    wrapper = ('workspace/build/src/mach python '
               if run['script'].endswith('.py') else '')

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    if run.get('sparse-profile'):
        sparse_profile = [
            '--sparse-profile',
            'build/sparse-profiles/{}'.format(run['sparse-profile']),
        ]
    else:
        sparse_profile = []

    command = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout=/builds/worker/workspace/build/src',
    ]
    command += sparse_profile
    command += [
        '--',
        'bash',
        '-c',
        'cd /builds/worker && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            wrapper, run['script'], args),
    ]
    worker['command'] = command

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    add_optimization(config, run, taskdesc)
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Set up a mozharness build task on docker-worker.

    Configures the docker image, public artifacts, workspace cache, gecko
    checkout and mozharness environment variables, then assembles the
    run-task command line.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    if not run['use-simple-package']:
        # BUGFIX: implicit string concatenation was missing a space and
        # rendered as "...disabled via'use-simple-package'...".
        raise NotImplementedError("Simple packaging cannot be disabled via "
                                  "'use-simple-package' on docker-workers")
    if not run['use-magic-mh-args']:
        # BUGFIX: grammar ("Cannot disabled") and missing space fixed.
        raise NotImplementedError("Cannot disable mh magic arg passing via "
                                  "'use-magic-mh-args' on docker-workers")

    # Running via mozharness assumes an image that contains build.sh:
    # by default, desktop-build, but it could be another image (like
    # android-gradle-build) that "inherits" from desktop-build.
    if not taskdesc['worker']['docker-image']:
        taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}

    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc,
                                      extra=run.get('extra-workspace-cache-key'))
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = 4

    docker_worker_setup_secrets(config, job, taskdesc)

    command = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout', '/builds/worker/workspace/build/src',
        '--tools-checkout', '/builds/worker/workspace/build/tools',
        '--',
        '/builds/worker/workspace/build/src/{}'.format(
            run.get('job-script', 'taskcluster/scripts/builder/build-linux.sh')),
    ]

    worker['command'] = command
def docker_worker_toolchain(config, job, taskdesc):
    """Configure a toolchain build task on docker-worker.

    Sets chain-of-trust, artifacts, a sparse gecko checkout and the run-task
    command line; records toolchain attributes and (unless taskgraph.fast)
    the cache entry used for task deduplication.
    """
    run = job['run']

    worker = taskdesc['worker']
    worker['chain-of-trust'] = True

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(
            artifact.get('name') == 'public/build' for artifact in artifacts):
        docker_worker_add_artifacts(config, job, taskdesc)

    support_vcs_checkout(config, job, taskdesc, sparse=True)

    # Toolchain checkouts don't live under {workdir}/checkouts
    workspace = '{workdir}/workspace/build'.format(**run)
    gecko_path = '{}/src'.format(workspace)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
        'MOZ_FETCHES_DIR': workspace,
        'GECKO_PATH': gecko_path,
    })

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        wrapper = '{}/mach python '.format(gecko_path)
    else:
        wrapper = ''

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    sparse_profile = []
    if run.get('sparse-profile'):
        sparse_profile = [
            '--sparse-profile=build/sparse-profiles/{}'.format(
                run['sparse-profile'])
        ]

    worker['command'] = [
        '{workdir}/bin/run-task'.format(**run),
        '--vcs-checkout={}'.format(gecko_path),
    ] + sparse_profile + [
        '--',
        'bash',
        '-c',
        # note: the script path below is relative to run['workdir'] after the
        # `cd`, matching the vcs-checkout location above
        'cd {} && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            run['workdir'], wrapper, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    # Skip cache bookkeeping when taskgraph.fast is set (presumably a
    # fast-generation mode that omits digests -- confirm in taskgraph module).
    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        taskdesc['cache'] = {
            'type': CACHE_TYPE,
            'name': name,
            'digest-data': get_digest_data(config, run, taskdesc),
        }
def docker_worker_toolchain(config, job, taskdesc):
    """Configure a toolchain build task on docker-worker.

    Enables chain-of-trust, sets up artifacts and a sparse gecko checkout,
    builds the run-task command line, records toolchain attributes and
    (unless taskgraph.fast) registers the optimization cache entry.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['chain-of-trust'] = True

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    existing_artifacts = worker.setdefault('artifacts', [])
    has_public_build = any(a.get('name') == 'public/build'
                           for a in existing_artifacts)
    if not has_public_build:
        docker_worker_add_public_artifacts(config, job, taskdesc)

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc, sparse=True)

    worker['env'].update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
    })

    if run['tooltool-downloads']:
        docker_worker_add_tooltool(
            config, job, taskdesc,
            internal=(run['tooltool-downloads'] == 'internal'))

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    wrapper = ('workspace/build/src/mach python '
               if run['script'].endswith('.py') else '')

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    if run.get('sparse-profile'):
        sparse_profile = [
            '--sparse-profile',
            'build/sparse-profiles/{}'.format(run['sparse-profile']),
        ]
    else:
        sparse_profile = []

    command = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout=/builds/worker/workspace/build/src',
    ]
    command += sparse_profile
    command += [
        '--',
        'bash',
        '-c',
        'cd /builds/worker && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            wrapper, run['script'], args),
    ]
    worker['command'] = command

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config, taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Set up a mozharness build task on docker-worker.

    Configures the docker image, public artifacts, workspace cache, gecko
    (and optionally comm) checkout and mozharness environment variables,
    then assembles the run-task command line.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    if not run['use-simple-package']:
        # BUGFIX: implicit string concatenation was missing a space and
        # rendered as "...disabled via'use-simple-package'...".
        raise NotImplementedError("Simple packaging cannot be disabled via "
                                  "'use-simple-package' on docker-workers")
    if not run['use-magic-mh-args']:
        # BUGFIX: grammar ("Cannot disabled") and missing space fixed.
        raise NotImplementedError("Cannot disable mh magic arg passing via "
                                  "'use-magic-mh-args' on docker-workers")

    # Running via mozharness assumes an image that contains build.sh:
    # by default, debian7-amd64-build, but it could be another image (like
    # android-build).
    taskdesc['worker'].setdefault('docker-image',
                                  {'in-tree': 'debian7-amd64-build'})

    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc,
                                      extra=run.get('extra-workspace-cache-key'))
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'config-paths' in run:
        env['MOZHARNESS_CONFIG_PATHS'] = ' '.join(run['config-paths'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'extra-config' in run:
        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run['extra-config'])

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = 4

    docker_worker_setup_secrets(config, job, taskdesc)

    command = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout', '/builds/worker/workspace/build/src',
        '--tools-checkout', '/builds/worker/workspace/build/tools',
    ]
    if run['comm-checkout']:
        command.append('--comm-checkout=/builds/worker/workspace/build/src/comm')

    command += [
        '--',
        '/builds/worker/workspace/build/src/{}'.format(
            run.get('job-script', 'taskcluster/scripts/builder/build-linux.sh')
        ),
    ]

    worker['command'] = command