def docker_worker_run_task(config, job, taskdesc):
    """Configure a docker-worker task that is driven directly by run-task.

    Mutates ``taskdesc`` in place: installs a deep copy of the job's worker
    payload, wires up the VCS checkout and build dependency when requested,
    and assembles the final run-task command line.
    """
    run = job['run']
    # Deep-copy so edits to the worker payload never leak back into the
    # shared job definition.
    worker = taskdesc['worker'] = copy.deepcopy(job['worker'])

    needs_checkout = run['checkout']
    if needs_checkout:
        docker_worker_support_vcs_checkout(config, job, taskdesc)
    if run['requires-build']:
        docker_worker_add_build_dependency(config, job, taskdesc)

    # A persistent ~/.cache volume is opt-in and only used above level 1.
    if run.get('cache-dotcache') and int(config.params['level']) > 1:
        worker['caches'].append({
            'type': 'persistent',
            'name': 'level-{level}-{project}-dotcache'.format(**config.params),
            'mount-point': '/home/worker/.cache',
        })

    task_command = run['command']
    if isinstance(task_command, basestring):
        # A bare string is run through bash with command tracing on.
        task_command = ['bash', '-cx', task_command]

    checkout_args = []
    if needs_checkout:
        checkout_args = ['--vcs-checkout=/home/worker/checkouts/gecko']

    worker['command'] = (
        ['/home/worker/bin/run-task'] + checkout_args + ['--'] + task_command
    )
def docker_worker_spidermonkey(config, job, taskdesc, schema=sm_run_schema):
    """Configure ``taskdesc`` to build a SpiderMonkey variant on docker-worker.

    Mutates ``taskdesc['worker']`` in place: resets artifacts/caches, adds
    the workspace and tooltool caches, populates the build environment, and
    assembles the run-task command line.

    :param config: transform config; ``config.params`` supplies ``level``,
        ``project`` and ``pushdate``.
    :param job: job description; ``job['run']`` carries the
        spidermonkey-specific options (``spidermonkey-variant``, ``using``,
        optional ``tooltool-manifest``).
    :param taskdesc: task description under construction (modified in place).
    :param schema: run schema for this transform; not referenced in the body
        (presumably consumed by the transform registration — TODO confirm).
    """
    run = job['run']
    worker = taskdesc['worker']
    # Start from clean lists; anything carried over from the job definition
    # is intentionally discarded here.
    worker['artifacts'] = []
    worker['caches'] = []
    # Only persist the workspace cache above level 1.
    if int(config.params['level']) > 1:
        worker['caches'].append({
            'type': 'persistent',
            'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
                config.params['level'], config.params['project']),
            'mount-point': "/home/worker/workspace",
        })
    docker_worker_add_public_artifacts(config, job, taskdesc)
    env = worker['env']
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        # Build date is derived from the push timestamp, rendered in UTC.
        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
                                        time.gmtime(config.params['pushdate'])),
        'MOZ_SCM_LEVEL': config.params['level'],
    })
    # tooltool downloads; note that this script downloads using the API
    # endpoint directly, rather than via relengapi-proxy
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    if run.get('tooltool-manifest'):
        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
    docker_worker_support_vcs_checkout(config, job, taskdesc)
    # Select the build script for the 'using' flavor; the plain spidermonkey
    # flavor falls through to build-sm.sh.
    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
    worker['command'] = [
        '/home/worker/bin/run-task',
        # run-task chowns these so the task user can write to them.
        '--chown-recursive', '/home/worker/workspace',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--vcs-checkout', '/home/worker/workspace/build/src',
        '--',
        '/bin/bash',
        '-c',
        'cd /home/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
    ]
def docker_worker_toolchain(config, job, taskdesc):
    """Configure ``taskdesc`` to run a toolchain build on docker-worker.

    Mutates ``taskdesc['worker']`` in place: publishes the workspace
    artifacts directory, sets up VCS support, the tooltool cache, optional
    relengapi-proxy scopes, and the run-task command line; finally registers
    index paths for the task.

    :param config: transform config; ``config.params`` supplies
        ``moz_build_date`` and ``level``.
    :param job: job description; ``job['run']`` carries ``script`` and
        ``tooltool-downloads``.
    :param taskdesc: task description under construction (modified in place).
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []
    worker['artifacts'].append({
        'name': 'public',
        'path': '/home/worker/workspace/artifacts/',
        'type': 'directory',
    })
    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    docker_worker_support_vcs_checkout(config, job, taskdesc)
    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
    })
    # tooltool downloads. By default we download using the API endpoint, but
    # the job can optionally request relengapi-proxy (for example when
    # downloading internal tooltool resources). So we define the tooltool
    # cache unconditionally.
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    # tooltool downloads
    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    if run['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
        # 'internal' downloads additionally need the internal scope.
        if run['tooltool-downloads'] == 'internal':
            taskdesc['scopes'].append(
                'docker-worker:relengapi-proxy:tooltool.download.internal')
    worker['command'] = [
        '/home/worker/bin/run-task',
        '--vcs-checkout=/home/worker/workspace/build/src',
        '--',
        'bash',
        '-c',
        'cd /home/worker && '
        './workspace/build/src/taskcluster/scripts/misc/{}'.format(
            run['script'])
    ]
    add_index_paths(config, run, taskdesc)
def docker_worker_spidermonkey(config, job, taskdesc, schema=sm_run_schema):
    """Set up a docker-worker task that builds one SpiderMonkey variant.

    Rebuilds ``taskdesc['worker']``'s artifacts, caches, environment and
    command in place, then hands VCS-checkout wiring off to the shared
    helper.
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    level = config.params['level']
    # Workspace caches are only shared above level 1.
    if int(level) > 1:
        worker['caches'].append({
            'type': 'persistent',
            'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
                level, config.params['project']),
            'mount-point': "/home/worker/workspace",
        })

    docker_worker_add_public_artifacts(config, job, taskdesc)

    # Build date comes from the push timestamp, formatted in UTC.
    build_date = time.strftime("%Y%m%d%H%M%S",
                               time.gmtime(config.params['pushdate']))
    env = worker['env']
    env['MOZHARNESS_DISABLE'] = 'true'
    env['SPIDERMONKEY_VARIANT'] = run['spidermonkey-variant']
    env['MOZ_BUILD_DATE'] = build_date
    env['MOZ_SCM_LEVEL'] = level

    # tooltool downloads; note that this script downloads using the API
    # endpoint directly, rather than via relengapi-proxy
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    if run.get('tooltool-manifest'):
        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']

    docker_worker_support_vcs_checkout(config, job, taskdesc)

    # Map the 'using' flavor to its build script; the plain flavor
    # defaults to build-sm.sh.
    script_by_flavor = {
        'spidermonkey-package': "build-sm-package.sh",
        'spidermonkey-mozjs-crate': "build-sm-mozjs-crate.sh",
    }
    script = script_by_flavor.get(run['using'], "build-sm.sh")

    worker['command'] = [
        '/home/worker/bin/run-task',
        '--chown-recursive', '/home/worker/workspace',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--vcs-checkout', '/home/worker/workspace/build/src',
        '--',
        '/bin/bash',
        '-c',
        'cd /home/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
    ]
def docker_worker_hazard(config, job, taskdesc):
    """Configure ``taskdesc`` to run a GC/rooting-hazard analysis build.

    Mutates ``taskdesc['worker']`` in place: attaches public artifacts, the
    workspace and tooltool caches, secrets, VCS checkout support, the
    analysis environment, and the run-task command line.

    :param config: transform config; ``config.params`` supplies
        ``moz_build_date`` and ``level``.
    :param job: job description; ``job['run']`` carries ``command`` plus
        optional ``tooltool-manifest`` and ``mozconfig``.
    :param taskdesc: task description under construction (modified in place).
    """
    run = job['run']
    worker = taskdesc['worker']
    # Start from clean lists before the helpers below append to them.
    worker['artifacts'] = []
    worker['caches'] = []
    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_setup_secrets(config, job, taskdesc)
    docker_worker_support_vcs_checkout(config, job, taskdesc)
    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })
    # script parameters
    if run.get('tooltool-manifest'):
        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run['mozconfig']
    # tooltool downloads
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    # Hazard builds always download via relengapi-proxy (public scope only).
    worker['relengapi-proxy'] = True
    taskdesc['scopes'].extend([
        'docker-worker:relengapi-proxy:tooltool.download.public',
    ])
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
    env['TOOLTOOL_REV'] = 'master'
    # build-haz-linux.sh needs this otherwise it assumes the checkout is in
    # the workspace.
    env['GECKO_DIR'] = '/home/worker/checkouts/gecko'
    worker['command'] = [
        '/home/worker/bin/run-task',
        # run-task chowns these so the task user can write to them.
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--chown-recursive', '/home/worker/workspace',
        '--vcs-checkout', '/home/worker/checkouts/gecko',
        '--',
        '/bin/bash', '-c', run['command']
    ]
def docker_worker_run_task(config, job, taskdesc):
    """Configure a docker-worker task executed via the run-task wrapper.

    Works on a private deep copy of the job's worker payload, optionally
    wires up a gecko checkout and a persistent ~/.cache volume, and stores
    the assembled command on the worker.
    """
    run = job['run']
    worker = taskdesc['worker'] = copy.deepcopy(job['worker'])

    if run['checkout']:
        docker_worker_support_vcs_checkout(config, job, taskdesc)

    # Opt-in ~/.cache volume; skipped at level 1 where caches aren't shared.
    if run.get('cache-dotcache') and int(config.params['level']) > 1:
        dotcache = {
            'type': 'persistent',
            'name': 'level-{level}-{project}-dotcache'.format(**config.params),
            'mount-point': '/home/worker/.cache',
        }
        worker['caches'].append(dotcache)

    payload = run['command']
    if isinstance(payload, basestring):
        # Strings are executed through bash with tracing enabled.
        payload = ['bash', '-cx', payload]

    wrapper = ['/home/worker/bin/run-task']
    if run['checkout']:
        wrapper.append('--vcs-checkout=/home/worker/checkouts/gecko')
    wrapper.append('--')
    wrapper.extend(payload)
    worker['command'] = wrapper
def docker_worker_setup(config, test, taskdesc):
    """Configure ``taskdesc`` to run a mozharness-driven test on docker-worker.

    Builds the worker payload from scratch: artifacts, workspace cache,
    mozharness environment, optional tooltool support, and the test-linux.sh
    command line (including chunking and symbol-download options).

    :param config: transform config; ``config.params`` supplies ``level``,
        ``project`` and ``message``.
    :param test: test description dict (instance size, mozharness options,
        loopback/chunk settings, etc.).
    :param taskdesc: task description under construction (modified in place).
    """
    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "/home/worker/workspace/build/upload/logs/"),
        ("public/test", "/home/worker/artifacts/"),
        ("public/test_info/", "/home/worker/workspace/build/blobber_upload_dir/"),
    ]
    mozharness = test['mozharness']
    # '<build>' is a placeholder task id, resolved later via task-reference.
    installer_url = ARTIFACT_URL.format('<build>', mozharness['build-artifact-name'])
    test_packages_url = ARTIFACT_URL.format(
        '<build>', 'public/build/target.test_packages.json')
    mozharness_url = ARTIFACT_URL.format('<build>', 'public/build/mozharness.zip')
    taskdesc['worker-type'] = WORKER_TYPE[test['instance-size']]
    worker = taskdesc['worker'] = {}
    worker['implementation'] = test['worker-implementation']
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']
    # NOTE(review): every in-image path above is absolute, so os.path.join
    # discards '/home/worker/workspace' and the result is just `path` —
    # confirm whether the join was meant to apply to relative paths.
    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('/home/worker/workspace', path),
        'type': 'directory',
    } for (prefix, path) in artifacts]
    worker['caches'] = [{
        'type': 'persistent',
        'name': 'level-{}-{}-test-workspace'.format(config.params['level'],
                                                    config.params['project']),
        'mount-point': "/home/worker/workspace",
    }]
    env = worker['env'] = {
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
    }
    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'
    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])
    if config.params['project'] == 'try':
        env['TRY_COMMIT_MSG'] = config.params['message']
    # handle some of the mozharness-specific options
    if mozharness['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        worker['caches'].append({
            'type': 'persistent',
            'name': 'tooltool-cache',
            'mount-point': '/home/worker/tooltool-cache',
        })
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.internal',
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
    # assemble the command line
    command = [
        '/home/worker/bin/run-task',
        # The workspace cache/volume is default owned by root:root.
        '--chown', '/home/worker/workspace',
    ]
    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    docker_worker_support_vcs_checkout(config, test, taskdesc)
    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        command.extend(['--vcs-checkout', '/home/worker/checkouts/gecko'])
        env['MOZHARNESS_PATH'] = '/home/worker/checkouts/gecko/testing/mozharness'
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}
    command.extend([
        '--',
        '/home/worker/bin/test-linux.sh',
    ])
    if mozharness.get('no-read-buildbot-config'):
        command.append("--no-read-buildbot-config")
    command.extend([
        {"task-reference": "--installer-url=" + installer_url},
        {"task-reference": "--test-packages-url=" + test_packages_url},
    ])
    command.extend(mozharness.get('extra-options', []))
    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace(
                '<CHUNK>', str(test['this-chunk']))
            # Append the chunk suffix to every --test-suite argument in place.
            for i, c in enumerate(command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix
    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        # Normalize booleans to the string form mozharness expects; other
        # values (e.g. 'ondemand') pass through unchanged.
        download_symbols = {True: 'true', False: 'false'}.get(
            download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)
    worker['command'] = command
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Configure ``taskdesc`` to run a mozharness-based build on docker-worker.

    Mutates ``taskdesc['worker']`` in place: forces the desktop-build image,
    sets up artifacts, workspace cache, VCS checkout, the mozharness
    environment, optional tooltool support, secrets, and the run-task
    command line.

    :param config: transform config; ``config.params`` supplies ``project``,
        ``moz_build_date`` and ``level``.
    :param job: job description; ``job['run']`` carries the mozharness
        options (config, script, actions, options, tooltool, xvfb, ...).
    :param taskdesc: task description under construction (modified in place).
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    # running via mozharness assumes desktop-build (which contains build.sh)
    taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}

    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    # tooltool downloads
    if run['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        worker['caches'].append({
            'type': 'persistent',
            'name': 'tooltool-cache',
            'mount-point': '/home/worker/tooltool-cache',
        })
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
        # 'internal' downloads additionally require the internal scope.
        if run['tooltool-downloads'] == 'internal':
            taskdesc['scopes'].append(
                'docker-worker:relengapi-proxy:tooltool.download.internal')
        env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
        env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
        env['TOOLTOOL_REV'] = 'master'

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = 4

    docker_worker_setup_secrets(config, job, taskdesc)

    command = [
        '/home/worker/bin/run-task',
        # Various caches/volumes are default owned by root:root.
        '--chown-recursive', '/home/worker/workspace',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--vcs-checkout', '/home/worker/workspace/build/src',
        '--tools-checkout', '/home/worker/workspace/build/tools',
        '--',
    ]
    command.append("/home/worker/workspace/build/src/{}".format(
        run.get('job-script', "taskcluster/scripts/builder/build-linux.sh")))
    worker['command'] = command
def docker_worker_setup(config, test, taskdesc):
    """Configure ``taskdesc`` to run a mozharness-driven test on docker-worker.

    Builds the worker payload from scratch: artifacts, workspace cache,
    mozharness environment, optional tooltool support, and the test-linux.sh
    command line (including chunking and symbol-download options).

    :param config: transform config; ``config.params`` supplies ``level``,
        ``project`` and ``message``.
    :param test: test description dict (instance size, mozharness options,
        loopback/chunk settings, etc.).
    :param taskdesc: task description under construction (modified in place).
    """
    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "/home/worker/workspace/build/upload/logs/"),
        ("public/test", "/home/worker/artifacts/"),
        ("public/test_info/", "/home/worker/workspace/build/blobber_upload_dir/"),
    ]
    mozharness = test['mozharness']
    # '<build>' is a placeholder task id, resolved later via task-reference.
    installer_url = ARTIFACT_URL.format('<build>',
                                        mozharness['build-artifact-name'])
    test_packages_url = ARTIFACT_URL.format(
        '<build>', 'public/build/target.test_packages.json')
    mozharness_url = ARTIFACT_URL.format('<build>',
                                         'public/build/mozharness.zip')

    taskdesc['worker-type'] = WORKER_TYPE[test['instance-size']]

    worker = taskdesc['worker'] = {}
    worker['implementation'] = test['worker-implementation']
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']

    # Every in-image path above is already absolute; the previous
    # os.path.join('/home/worker/workspace', path) silently discarded its
    # first component (join drops everything before an absolute component),
    # so use the path directly.
    worker['artifacts'] = [{
        'name': prefix,
        'path': path,
        'type': 'directory',
    } for (prefix, path) in artifacts]

    worker['caches'] = [{
        'type': 'persistent',
        'name': 'level-{}-{}-test-workspace'.format(
            config.params['level'], config.params['project']),
        'mount-point': "/home/worker/workspace",
    }]

    env = worker['env'] = {
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
    }

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params['project'] == 'try':
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options
    if mozharness['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        worker['caches'].append({
            'type': 'persistent',
            'name': 'tooltool-cache',
            'mount-point': '/home/worker/tooltool-cache',
        })
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.internal',
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])

    # assemble the command line
    command = [
        '/home/worker/bin/run-task',
        # The workspace cache/volume is default owned by root:root.
        '--chown', '/home/worker/workspace',
    ]

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    docker_worker_support_vcs_checkout(config, test, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        command.extend(['--vcs-checkout', '/home/worker/checkouts/gecko'])
        env['MOZHARNESS_PATH'] = '/home/worker/checkouts/gecko/testing/mozharness'
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    command.extend([
        '--',
        '/home/worker/bin/test-linux.sh',
    ])

    if mozharness.get('no-read-buildbot-config'):
        command.append("--no-read-buildbot-config")
    command.extend([
        {"task-reference": "--installer-url=" + installer_url},
        {"task-reference": "--test-packages-url=" + test_packages_url},
    ])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace(
                '<CHUNK>', str(test['this-chunk']))
            # Append the chunk suffix to every --test-suite argument in place.
            for i, c in enumerate(command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        # Normalize booleans to the string form mozharness expects; other
        # values (e.g. 'ondemand') pass through unchanged.
        download_symbols = {True: 'true', False: 'false'}.get(
            download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    worker['command'] = command
def docker_worker_setup(config, test, taskdesc):
    """Configure ``taskdesc`` to run a mozharness-driven test on docker-worker.

    Builds the worker payload from scratch: artifacts, workspace cache,
    mozharness environment, optional tooltool support, and the test-linux.sh
    command line (including chunking and symbol-download options).

    :param config: transform config; ``config.params`` supplies ``level``,
        ``project`` and ``message``.
    :param test: test description dict (instance size, mozharness options,
        loopback/chunk settings, etc.).
    :param taskdesc: task description under construction (modified in place).
    """
    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "/home/worker/workspace/build/upload/logs/"),
        ("public/test", "/home/worker/artifacts/"),
        ("public/test_info/", "/home/worker/workspace/build/blobber_upload_dir/"),
    ]
    mozharness = test["mozharness"]
    # '<build>' is a placeholder task id, resolved later via task-reference.
    installer_url = ARTIFACT_URL.format("<build>", mozharness["build-artifact-name"])
    test_packages_url = ARTIFACT_URL.format("<build>", "public/build/target.test_packages.json")
    mozharness_url = ARTIFACT_URL.format("<build>", "public/build/mozharness.zip")
    taskdesc["worker-type"] = WORKER_TYPE[test["instance-size"]]
    worker = taskdesc["worker"] = {}
    worker["implementation"] = test["worker-implementation"]
    worker["docker-image"] = test["docker-image"]
    worker["allow-ptrace"] = True  # required for all tests, for crashreporter
    worker["relengapi-proxy"] = False  # but maybe enabled for tooltool below
    worker["loopback-video"] = test["loopback-video"]
    worker["loopback-audio"] = test["loopback-audio"]
    worker["max-run-time"] = test["max-run-time"]
    worker["retry-exit-status"] = test["retry-exit-status"]
    worker["artifacts"] = [{"name": prefix, "path": path, "type": "directory"} for (prefix, path) in artifacts]
    worker["caches"] = [
        {
            "type": "persistent",
            "name": "level-{}-{}-test-workspace".format(config.params["level"], config.params["project"]),
            "mount-point": "/home/worker/workspace",
        }
    ]
    env = worker["env"] = {
        "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
        "MOZHARNESS_SCRIPT": mozharness["script"],
        "MOZILLA_BUILD_URL": {"task-reference": installer_url},
        "NEED_PULSEAUDIO": "true",
        "NEED_WINDOW_MANAGER": "true",
    }
    if mozharness["set-moz-node-path"]:
        env["MOZ_NODE_PATH"] = "/usr/local/bin/node"
    if "actions" in mozharness:
        env["MOZHARNESS_ACTIONS"] = " ".join(mozharness["actions"])
    if config.params["project"] == "try":
        env["TRY_COMMIT_MSG"] = config.params["message"]
    # handle some of the mozharness-specific options
    if mozharness["tooltool-downloads"]:
        worker["relengapi-proxy"] = True
        worker["caches"].append(
            {"type": "persistent", "name": "tooltool-cache", "mount-point": "/home/worker/tooltool-cache"}
        )
        taskdesc["scopes"].extend(
            [
                "docker-worker:relengapi-proxy:tooltool.download.internal",
                "docker-worker:relengapi-proxy:tooltool.download.public",
            ]
        )
    # assemble the command line
    command = [
        "/home/worker/bin/run-task",
        # The workspace cache/volume is default owned by root:root.
        "--chown",
        "/home/worker/workspace",
    ]
    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    docker_worker_support_vcs_checkout(config, test, taskdesc)
    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test["checkout"]:
        command.extend(["--vcs-checkout", "/home/worker/checkouts/gecko"])
        env["MOZHARNESS_PATH"] = "/home/worker/checkouts/gecko/testing/mozharness"
    else:
        env["MOZHARNESS_URL"] = {"task-reference": mozharness_url}
    command.extend(["--", "/home/worker/bin/test-linux.sh"])
    if mozharness.get("no-read-buildbot-config"):
        command.append("--no-read-buildbot-config")
    command.extend(
        [
            {"task-reference": "--installer-url=" + installer_url},
            {"task-reference": "--test-packages-url=" + test_packages_url},
        ]
    )
    command.extend(mozharness.get("extra-options", []))
    # TODO: remove the need for run['chunked']
    if mozharness.get("chunked") or test["chunks"] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness["chunking-args"] == "this-chunk":
            command.append("--total-chunk={}".format(test["chunks"]))
            command.append("--this-chunk={}".format(test["this-chunk"]))
        elif mozharness["chunking-args"] == "test-suite-suffix":
            suffix = mozharness["chunk-suffix"].replace("<CHUNK>", str(test["this-chunk"]))
            # Append the chunk suffix to every --test-suite argument in place.
            for i, c in enumerate(command):
                if isinstance(c, basestring) and c.startswith("--test-suite"):
                    command[i] += suffix
    if "download-symbols" in mozharness:
        download_symbols = mozharness["download-symbols"]
        # Normalize booleans to the string form mozharness expects; other
        # values (e.g. 'ondemand') pass through unchanged.
        download_symbols = {True: "true", False: "false"}.get(download_symbols, download_symbols)
        command.append("--download-symbols=" + download_symbols)
    worker["command"] = command
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Configure ``taskdesc`` to run a mozharness-based build on docker-worker.

    Mutates ``taskdesc['worker']`` in place: forces the desktop-build image,
    sets up artifacts, workspace cache, VCS checkout, the mozharness
    environment, optional tooltool support, secrets, and the run-task
    command line.

    :param config: transform config; ``config.params`` supplies ``project``,
        ``pushdate`` and ``level``.
    :param job: job description; ``job['run']`` carries the mozharness
        options (config, script, actions, options, tooltool, xvfb, ...).
    :param taskdesc: task description under construction (modified in place).
    """
    run = job['run']
    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    # running via mozharness assumes desktop-build (which contains build.sh)
    taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}

    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MH_BUILD_POOL': 'taskcluster',
        # Build date is derived from the push timestamp, rendered in UTC.
        'MOZ_BUILD_DATE': time.strftime("%Y%m%d%H%M%S",
                                        time.gmtime(config.params['pushdate'])),
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    # tooltool downloads
    if run['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        worker['caches'].append({
            'type': 'persistent',
            'name': 'tooltool-cache',
            'mount-point': '/home/worker/tooltool-cache',
        })
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
        # 'internal' downloads additionally require the internal scope.
        if run['tooltool-downloads'] == 'internal':
            taskdesc['scopes'].append(
                'docker-worker:relengapi-proxy:tooltool.download.internal')
        env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
        env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
        env['TOOLTOOL_REV'] = 'master'

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = 4

    docker_worker_setup_secrets(config, job, taskdesc)

    command = [
        '/home/worker/bin/run-task',
        # Various caches/volumes are default owned by root:root.
        '--chown-recursive', '/home/worker/workspace',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--vcs-checkout', '/home/worker/workspace/build/src',
        '--tools-checkout', '/home/worker/workspace/build/tools',
        '--',
    ]
    command.append("/home/worker/workspace/build/src/{}".format(
        run.get('job-script', "taskcluster/scripts/builder/build-linux.sh")))
    worker['command'] = command
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    """Configure ``taskdesc`` to run a mozharness-based build on docker-worker.

    Mutates ``taskdesc['worker']`` in place: forces the desktop-build image,
    sets up artifacts, workspace cache, VCS checkout, the mozharness
    environment, optional tooltool support, secrets, and the run-task
    command line.

    :param config: transform config; ``config.params`` supplies ``project``,
        ``moz_build_date`` and ``level``.
    :param job: job description; ``job['run']`` carries the mozharness
        options (config, script, actions, options, tooltool, xvfb, ...).
    :param taskdesc: task description under construction (modified in place).
    """
    run = job["run"]
    worker = taskdesc["worker"]
    worker["implementation"] = job["worker"]["implementation"]

    # running via mozharness assumes desktop-build (which contains build.sh)
    taskdesc["worker"]["docker-image"] = {"in-tree": "desktop-build"}

    worker["relengapi-proxy"] = False  # but maybe enabled for tooltool below
    worker["taskcluster-proxy"] = run.get("taskcluster-proxy")

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault("env", {})
    env.update(
        {
            "MOZHARNESS_CONFIG": " ".join(run["config"]),
            "MOZHARNESS_SCRIPT": run["script"],
            "MH_BRANCH": config.params["project"],
            "MH_BUILD_POOL": "taskcluster",
            "MOZ_BUILD_DATE": config.params["moz_build_date"],
            "MOZ_SCM_LEVEL": config.params["level"],
        }
    )

    if "actions" in run:
        env["MOZHARNESS_ACTIONS"] = " ".join(run["actions"])

    if "options" in run:
        env["MOZHARNESS_OPTIONS"] = " ".join(run["options"])

    if "custom-build-variant-cfg" in run:
        env["MH_CUSTOM_BUILD_VARIANT_CFG"] = run["custom-build-variant-cfg"]

    if "job-script" in run:
        env["JOB_SCRIPT"] = run["job-script"]

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory. This will have no effect for operations that are
    # not builds.
    if not run["keep-artifacts"]:
        env["DIST_TARGET_UPLOADS"] = ""
        env["DIST_UPLOADS"] = ""

    # Xvfb
    if run["need-xvfb"]:
        env["NEED_XVFB"] = "true"

    # tooltool downloads
    if run["tooltool-downloads"]:
        worker["relengapi-proxy"] = True
        worker["caches"].append(
            {"type": "persistent", "name": "tooltool-cache", "mount-point": "/home/worker/tooltool-cache"}
        )
        taskdesc["scopes"].extend(["docker-worker:relengapi-proxy:tooltool.download.public"])
        # 'internal' downloads additionally require the internal scope.
        if run["tooltool-downloads"] == "internal":
            taskdesc["scopes"].append("docker-worker:relengapi-proxy:tooltool.download.internal")
        env["TOOLTOOL_CACHE"] = "/home/worker/tooltool-cache"
        env["TOOLTOOL_REPO"] = "https://github.com/mozilla/build-tooltool"
        env["TOOLTOOL_REV"] = "master"

    # Retry if mozharness returns TBPL_RETRY
    worker["retry-exit-status"] = 4

    docker_worker_setup_secrets(config, job, taskdesc)

    command = [
        "/home/worker/bin/run-task",
        # Various caches/volumes are default owned by root:root.
        "--chown-recursive",
        "/home/worker/workspace",
        "--chown-recursive",
        "/home/worker/tooltool-cache",
        "--vcs-checkout",
        "/home/worker/workspace/build/src",
        "--tools-checkout",
        "/home/worker/workspace/build/tools",
        "--",
    ]
    command.append(
        "/home/worker/workspace/build/src/{}".format(
            run.get("job-script", "taskcluster/scripts/builder/build-linux.sh")
        )
    )
    worker["command"] = command