Example #1
def docker_worker_hazard(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_add_tooltool(config, job, taskdesc)
    docker_worker_setup_secrets(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # script parameters
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run['mozconfig']

    # build-haz-linux.sh needs this; otherwise it assumes the checkout is in
    # the workspace.
    env['GECKO_DIR'] = '/builds/worker/checkouts/gecko'

    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout', '/builds/worker/checkouts/gecko',
        '--',
        '/bin/bash', '-c', run['command']
    ]
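
A minimal sketch of the shared pattern above: every one of these transforms takes (config, job, taskdesc) and mutates taskdesc in place rather than returning a value. The dict shapes below are hypothetical stand-ins for the real taskgraph objects.

from types import SimpleNamespace

# Hypothetical stand-ins for the real taskgraph config/job/taskdesc objects.
config = SimpleNamespace(params={'moz_build_date': '20240101000000', 'level': '1'})
job = {'run': {'command': 'build-haz-linux.sh', 'mozconfig': None}}
taskdesc = {'worker': {'env': {}}, 'scopes': []}

# A transform mutates taskdesc['worker'] in place, as docker_worker_hazard does.
env = taskdesc['worker']['env']
env.update({
    'MOZ_BUILD_DATE': config.params['moz_build_date'],
    'MOZ_SCM_LEVEL': config.params['level'],
})
assert taskdesc['worker']['env']['MOZ_SCM_LEVEL'] == '1'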
Example #2
def common_setup(config, job, taskdesc):
    run = job['run']
    if run['checkout']:
        support_vcs_checkout(config, job, taskdesc)

    if run['requires-build']:
        add_build_dependency(config, job, taskdesc)
Example #3
def common_setup(config, job, taskdesc):
    run = job['run']
    if run['checkout']:
        support_vcs_checkout(config, job, taskdesc,
                             sparse=bool(run['sparse-profile']))

    taskdesc['worker'].setdefault('env', {})['MOZ_SCM_LEVEL'] = config.params['level']
Example #4
File: run_task.py  Project: AlexxNica/gecko
def common_setup(config, job, taskdesc):
    run = job['run']
    if run['checkout']:
        support_vcs_checkout(config, job, taskdesc)

    if run['requires-build']:
        add_build_dependency(config, job, taskdesc)
Example #5
def docker_worker_toolchain(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    worker['artifacts'].append({
        'name': 'public',
        'path': '/home/worker/workspace/artifacts/',
        'type': 'directory',
    })

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
    })

    # tooltool downloads.  By default we download using the API endpoint, but
    # the job can optionally request relengapi-proxy (for example when
    # downloading internal tooltool resources).  So we define the tooltool
    # cache unconditionally.
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'

    # tooltool downloads
    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    if run['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
        if run['tooltool-downloads'] == 'internal':
            taskdesc['scopes'].append(
                'docker-worker:relengapi-proxy:tooltool.download.internal')

    worker['command'] = [
        '/home/worker/bin/run-task',
        # Various caches/volumes are owned by root:root by default.
        '--chown-recursive', '/home/worker/workspace',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--vcs-checkout=/home/worker/workspace/build/src',
        '--',
        'bash',
        '-c',
        'cd /home/worker && '
        './workspace/build/src/taskcluster/scripts/misc/{}'.format(
            run['script'])
    ]

    add_optimizations(config, run, taskdesc)
Example #6
def common_setup(config, job, taskdesc):
    run = job['run']
    if run['checkout']:
        support_vcs_checkout(config, job, taskdesc,
                             sparse=bool(run['sparse-profile']))

    taskdesc['worker'].setdefault('env', {})['MOZ_SCM_LEVEL'] = config.params['level']
Example #7
def docker_worker_toolchain(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    worker['artifacts'].append({
        'name': 'public',
        'path': '/home/worker/workspace/artifacts/',
        'type': 'directory',
    })

    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
    })

    # tooltool downloads.  By default we download using the API endpoint, but
    # the job can optionally request relengapi-proxy (for example when
    # downloading internal tooltool resources).  So we define the tooltool
    # cache unconditionally.
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'

    # tooltool downloads
    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    if run['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
        if run['tooltool-downloads'] == 'internal':
            taskdesc['scopes'].append(
                'docker-worker:relengapi-proxy:tooltool.download.internal')

    worker['command'] = [
        '/home/worker/bin/run-task',
        '--vcs-checkout=/home/worker/workspace/build/src',
        '--',
        'bash',
        '-c',
        'cd /home/worker && '
        './workspace/build/src/taskcluster/scripts/misc/{}'.format(
            run['script'])
    ]

    add_optimizations(config, run, taskdesc)
Example #8
def docker_worker_spidermonkey(config, job, taskdesc, schema=sm_run_schema):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    if int(config.params['level']) > 1:
        worker['caches'].append({
            'type': 'persistent',
            'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
                config.params['level'], config.params['project']),
            'mount-point': "/home/worker/workspace",
        })

    docker_worker_add_public_artifacts(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # tooltool downloads; note that this script downloads using the API
    # endpoint directly, rather than via relengapi-proxy
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    if run.get('tooltool-manifest'):
        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']

    support_vcs_checkout(config, job, taskdesc)

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"

    worker['command'] = [
        '/home/worker/bin/run-task',
        '--chown-recursive', '/home/worker/workspace',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--vcs-checkout', '/home/worker/workspace/build/src',
        '--',
        '/bin/bash',
        '-c',
        'cd /home/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
    ]
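
The run['using'] → script selection above appears as an if/elif chain here and in several later examples. As an illustrative sketch (not from the source), the same selection can be written as a mapping with a default:

# Illustrative refactor of the script selection above; names are not from
# the original source.
SCRIPTS = {
    'spidermonkey-package': 'build-sm-package.sh',
    'spidermonkey-mozjs-crate': 'build-sm-mozjs-crate.sh',
    'spidermonkey-rust-bindings': 'build-sm-rust-bindings.sh',
}

def select_script(using):
    # Fall back to the plain SpiderMonkey build script.
    return SCRIPTS.get(using, 'build-sm.sh')

assert select_script('spidermonkey') == 'build-sm.sh'
assert select_script('spidermonkey-package') == 'build-sm-package.sh'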
Example #9
def windows_toolchain(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']

    worker['artifacts'] = [{
        'path': r'public\build',
        'type': 'directory',
    }]
    worker['chain-of-trust'] = True

    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    hg_command = generic_worker_hg_commands(
        'https://hg.mozilla.org/mozilla-unified',
        env['GECKO_HEAD_REPOSITORY'],
        env['GECKO_HEAD_REV'],
        r'.\build\src')[0]

    # Python scripts are normally run via `mach` so in-tree libraries are
    # available, but that isn't supported on the Windows workers yet.
    if run['script'].endswith('.py'):
        raise NotImplementedError("Python scripts don't work on Windows")

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    bash = r'c:\mozilla-build\msys\bin\bash'
    worker['command'] = [
        hg_command,
        # do something intelligent.
        r'{} build/src/taskcluster/scripts/misc/{}{}'.format(
            bash, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config, taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
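
A small worked example (sample values, not from the source) of the cache-name derivation above: the kind prefix is stripped from the task label once, from the left.

kind = 'toolchain'                      # stands in for config.kind
label = 'toolchain-win64-clang-cl'      # stands in for taskdesc['label']
name = label.replace('{}-'.format(kind), '', 1)
assert name == 'win64-clang-cl'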
Example #10
def generic_worker_spidermonkey(config, job, taskdesc):
    assert job['worker']['os'] == 'windows', 'only supports windows right now'

    run = job['run']

    worker = taskdesc['worker']

    generic_worker_add_artifacts(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'SCCACHE_DISABLE': "1",
        'WORK': ".",  # Override the defaults in build scripts
        'SRCDIR': "./src",  # with values suiteable for windows generic worker
        'UPLOAD_DIR': "./public/build"
    })

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-package is not a supported configuration")
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-mozjs-crate is not a supported configuration")
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-rust-bindings is not a supported configuration")

    hg_command = generic_worker_hg_commands(
        'https://hg.mozilla.org/mozilla-unified',
        env['GECKO_HEAD_REPOSITORY'],
        env['GECKO_HEAD_REV'],
        r'.\src',
    )[0]

    command = [
        'c:\\mozilla-build\\msys\\bin\\bash.exe '  # adjacent strings concatenate
        '"./src/taskcluster/scripts/builder/%s"' % script
    ]

    worker['command'] = [
        hg_command,
        ' '.join(command),
    ]
Example #11
def docker_worker_spidermonkey(config, job, taskdesc, schema=sm_run_schema):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    if int(config.params['level']) > 1:
        worker['caches'].append({
            'type': 'persistent',
            'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
                config.params['level'], config.params['project']),
            'mount-point': "/home/worker/workspace",
        })

    docker_worker_add_public_artifacts(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # tooltool downloads; note that this script downloads using the API
    # endpoint directly, rather than via relengapi-proxy
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    if run.get('tooltool-manifest'):
        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']

    support_vcs_checkout(config, job, taskdesc)

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"

    worker['command'] = [
        '/home/worker/bin/run-task',
        '--chown-recursive', '/home/worker/workspace',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--vcs-checkout', '/home/worker/workspace/build/src',
        '--',
        '/bin/bash',
        '-c',
        'cd /home/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
    ]
Example #12
File: run_task.py  Project: gsnedders/gecko
def common_setup(config, job, taskdesc, command, checkoutdir):
    run = job['run']
    if run['checkout']:
        support_vcs_checkout(config, job, taskdesc,
                             sparse=bool(run['sparse-profile']))
        command.append('--vcs-checkout={}/gecko'.format(checkoutdir))

    if run['sparse-profile']:
        command.append('--sparse-profile=build/sparse-profiles/%s' %
                       run['sparse-profile'])

    taskdesc['worker'].setdefault('env', {})['MOZ_SCM_LEVEL'] = config.params['level']
Example #13
def docker_worker_spidermonkey(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker.setdefault('caches', []).append({
        'type': 'persistent',
        'name': '{}-build-spidermonkey-workspace'.format(config.params['project']),
        'mount-point': "{workdir}/workspace".format(**run),
        'skip-untrusted': True,
    })

    docker_worker_add_artifacts(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })
    if 'spidermonkey-platform' in run:
        env['SPIDERMONKEY_PLATFORM'] = run['spidermonkey-platform']

    support_vcs_checkout(config, job, taskdesc)

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    worker['command'] = [
        '{workdir}/bin/run-task'.format(**run),
        '--gecko-checkout', '{workdir}/workspace/build/src'.format(**run),
        '--',
        '/bin/bash',
        '-c',
        'cd {workdir} && workspace/build/src/taskcluster/scripts/builder/{script}'.format(
            workdir=run['workdir'], script=script)
    ]
Example #14
def docker_worker_hazard(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_setup_secrets(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # script parameters
    if run.get('tooltool-manifest'):
        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run['mozconfig']

    # tooltool downloads
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    worker['relengapi-proxy'] = True
    taskdesc['scopes'].extend([
        'docker-worker:relengapi-proxy:tooltool.download.public',
    ])
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'

    # build-haz-linux.sh needs this; otherwise it assumes the checkout is in
    # the workspace.
    env['GECKO_DIR'] = '/home/worker/checkouts/gecko'

    worker['command'] = [
        '/home/worker/bin/run-task',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--chown-recursive', '/home/worker/workspace',
        '--vcs-checkout', '/home/worker/checkouts/gecko',
        '--',
        '/bin/bash', '-c', run['command']
    ]
Example #15
def common_setup(config, job, taskdesc, command):
    run = job["run"]
    if run["checkout"]:
        support_vcs_checkout(config,
                             job,
                             taskdesc,
                             sparse=bool(run["sparse-profile"]))
        command.append("--gecko-checkout={}".format(
            taskdesc["worker"]["env"]["GECKO_PATH"]))

    if run["sparse-profile"]:
        command.append("--gecko-sparse-profile=build/sparse-profiles/%s" %
                       run["sparse-profile"])

    taskdesc["worker"].setdefault("env",
                                  {})["MOZ_SCM_LEVEL"] = config.params["level"]
Example #16
def docker_worker_hazard(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_setup_secrets(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # script parameters
    if run.get('tooltool-manifest'):
        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run['mozconfig']

    # tooltool downloads
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    worker['relengapi-proxy'] = True
    taskdesc['scopes'].extend([
        'docker-worker:relengapi-proxy:tooltool.download.public',
    ])
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'

    # build-haz-linux.sh needs this; otherwise it assumes the checkout is in
    # the workspace.
    env['GECKO_DIR'] = '/home/worker/checkouts/gecko'

    worker['command'] = [
        '/home/worker/bin/run-task',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--chown-recursive', '/home/worker/workspace',
        '--vcs-checkout', '/home/worker/checkouts/gecko',
        '--',
        '/bin/bash', '-c', run['command']
    ]
Example #17
def docker_worker_spidermonkey(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker.setdefault('caches', []).append({
        'type': 'persistent',
        'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
            config.params['level'], config.params['project']),
        'mount-point': "/builds/worker/workspace",
        'skip-untrusted': True,
    })

    docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_tooltool(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    support_vcs_checkout(config, job, taskdesc)

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"

    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout', '/builds/worker/workspace/build/src',
        '--',
        '/bin/bash',
        '-c',
        'cd /builds/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
    ]
Example #18
def common_setup(config, job, taskdesc, command):
    run = job["run"]
    if run["checkout"]:
        support_vcs_checkout(config,
                             job,
                             taskdesc,
                             sparse=bool(run["sparse-profile"]))
        command.append("--gecko-checkout={}".format(
            taskdesc["worker"]["env"]["GECKO_PATH"]))

    if run["sparse-profile"]:
        sparse_profile_prefix = run.pop("sparse-profile-prefix",
                                        "build/sparse-profiles")
        sparse_profile_path = path.join(sparse_profile_prefix,
                                        run["sparse-profile"])
        command.append("--gecko-sparse-profile={}".format(sparse_profile_path))

    taskdesc["worker"].setdefault("env",
                                  {})["MOZ_SCM_LEVEL"] = config.params["level"]
Example #19
def docker_worker_spidermonkey(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker.setdefault('caches', []).append({
        'type': 'persistent',
        'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
            config.params['level'], config.params['project']),
        'mount-point': "/builds/worker/workspace",
        'skip-untrusted': True,
    })

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_tooltool(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    support_vcs_checkout(config, job, taskdesc)

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"

    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout', '/builds/worker/workspace/build/src',
        '--',
        '/bin/bash',
        '-c',
        'cd /builds/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
    ]
Example #20
def docker_worker_toolchain(config, job, taskdesc):
    run = job['run']
    taskdesc['run-on-projects'] = ['trunk', 'try']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['chain-of-trust'] = True

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
    })

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout=/builds/worker/workspace/build/src',
        '--',
        'bash',
        '-c',
        'cd /builds/worker && '
        './workspace/build/src/taskcluster/scripts/misc/{}'.format(
            run['script'])
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    add_optimizations(config, run, taskdesc)
Example #21
def common_setup(config, job, taskdesc, command):
    run = job["run"]
    if run["checkout"]:
        repo_configs = config.repo_configs
        if len(repo_configs) > 1 and run["checkout"] is True:
            raise Exception(
                "Must explicitly specify checkouts with multiple repos.")
        elif run["checkout"] is not True:
            repo_configs = {
                repo: attr.evolve(repo_configs[repo], **overrides)
                for (repo, overrides) in run["checkout"].items()
            }

        support_vcs_checkout(
            config,
            job,
            taskdesc,
            repo_configs=repo_configs,
            sparse=bool(run["sparse-profile"]),
        )

        vcs_path = taskdesc["worker"]["env"]["VCS_PATH"]
        for repo_config in repo_configs.values():
            checkout_path = path.join(vcs_path, repo_config.path)
            command.append(f"--{repo_config.prefix}-checkout={checkout_path}")

        if run["sparse-profile"]:
            command.append(
                "--{}-sparse-profile=build/sparse-profiles/{}".format(
                    repo_config.prefix,
                    run["sparse-profile"],
                ))

        if "cwd" in run:
            run["cwd"] = path.normpath(run["cwd"].format(checkout=vcs_path))
    elif "cwd" in run and "{checkout}" in run["cwd"]:
        raise Exception(
            "Found `{{checkout}}` interpolation in `cwd` for task {name} "
            "but the task doesn't have a checkout: {cwd}".format(
                cwd=run["cwd"], name=job.get("name", job.get("label"))))

    if "cwd" in run:
        command.extend(("--task-cwd", run["cwd"]))

    taskdesc["worker"].setdefault("env",
                                  {})["MOZ_SCM_LEVEL"] = config.params["level"]
Example #22
def mozharness_on_generic_worker(config, job, taskdesc):
    assert job['worker']['os'] == 'windows', 'only supports windows right now'

    run = job['run']

    # fail if invalid run options are included
    invalid = []
    for prop in [
            'tooltool-downloads', 'secrets', 'taskcluster-proxy', 'need-xvfb'
    ]:
        if prop in run and run[prop]:
            invalid.append(prop)
    if not run.get('keep-artifacts', True):
        invalid.append('keep-artifacts')
    if invalid:
        raise Exception(
            "Jobs run using mozharness on Windows do not support properties " +
            ', '.join(invalid))

    worker = taskdesc['worker']

    taskdesc['worker'].setdefault('artifacts', []).append({
        'name': 'public/logs',
        'path': 'logs',
        'type': 'directory'
    })
    if not worker.get('skip-artifacts', False):
        generic_worker_add_artifacts(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
        'MH_BRANCH': config.params['project'],
        'MOZ_SOURCE_CHANGESET': env['GECKO_HEAD_REV'],
    })
    if run['use-simple-package']:
        env.update({'MOZ_SIMPLE_PACKAGE_NAME': 'target'})

    if 'extra-config' in run:
        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run['extra-config'])

    # The windows generic worker uses batch files to pass environment variables
    # to commands.  Setting a variable to empty in a batch file unsets it, so if
    # there is no try commit message, pass a placeholder instead, so that
    # mozharness doesn't try to find the commit message on its own.
    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message'] or 'no commit message'

    if run['comm-checkout']:
        env['MOZ_SOURCE_CHANGESET'] = env['COMM_HEAD_REV']

    if not job['attributes']['build_platform'].startswith('win'):
        raise Exception(
            "Task generation for mozharness build jobs currently only supported on Windows"
        )

    mh_command = [r'c:\mozilla-build\python\python.exe']
    mh_command.append('\\'.join(
        [r'.\build\src\testing', run['script'].replace('/', '\\')]))

    if 'config-paths' in run:
        for path in run['config-paths']:
            mh_command.append(r'--extra-config-path '
                              r'.\build\src\{}'.format(path.replace('/',
                                                                    '\\')))

    for cfg in run['config']:
        mh_command.append('--config ' + cfg.replace('/', '\\'))
    if run['use-magic-mh-args']:
        mh_command.append('--branch ' + config.params['project'])
    mh_command.append(r'--work-dir %cd:Z:=z:%\build')
    for action in run.get('actions', []):
        mh_command.append('--' + action)

    for option in run.get('options', []):
        mh_command.append('--' + option)
    if run.get('custom-build-variant-cfg'):
        mh_command.append('--custom-build-variant')
        mh_command.append(run['custom-build-variant-cfg'])

    hg_commands = generic_worker_hg_commands(
        base_repo=env['GECKO_BASE_REPOSITORY'],
        head_repo=env['GECKO_HEAD_REPOSITORY'],
        head_rev=env['GECKO_HEAD_REV'],
        path=r'.\build\src',
    )

    if run['comm-checkout']:
        hg_commands.extend(
            generic_worker_hg_commands(base_repo=env['COMM_BASE_REPOSITORY'],
                                       head_repo=env['COMM_HEAD_REPOSITORY'],
                                       head_rev=env['COMM_HEAD_REV'],
                                       path=r'.\build\src\comm'))

    fetch_commands = []
    if 'MOZ_FETCHES' in env:
        # When Bug 1436037 is fixed, run-task can be used for this task,
        # and this call can go away
        fetch_commands.append(' '.join([
            r'c:\mozilla-build\python3\python3.exe',
            r'build\src\taskcluster\scripts\misc\fetch-content',
            'task-artifacts',
        ]))

    worker['command'] = []
    if taskdesc.get('needs-sccache'):
        worker['command'].extend([
            # Make the comment part of the first command, as it will help users to
            # understand what is going on, and why these steps are implemented.
            dedent('''\
            :: sccache currently uses the full compiler commandline as input to the
            :: cache hash key, so create a symlink to the task dir and build from
            :: the symlink dir to get consistent paths.
            if exist z:\\build rmdir z:\\build'''),
            r'mklink /d z:\build %cd%',
            # Grant delete permission on the link to everyone.
            r'icacls z:\build /grant *S-1-1-0:D /L',
            r'cd /d z:\build',
        ])

    worker['command'].extend(hg_commands)
    worker['command'].extend(fetch_commands)
    worker['command'].extend([' '.join(mh_command)])
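
A tiny worked example (sample values) of the `or` fallback used for TRY_COMMIT_MSG above: setting a variable to empty in a batch file would unset it, so a non-empty placeholder is substituted.

message = ''                     # stands in for config.params['message']
try_commit_msg = message or 'no commit message'
assert try_commit_msg == 'no commit message'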
Example #23
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    if not run['use-simple-package']:
        raise NotImplementedError("Simple packaging cannot be disabled via "
                                  "'use-simple-package' on docker-workers")
    if not run['use-magic-mh-args']:
        raise NotImplementedError("Cannot disable mh magic arg passing via "
                                  "'use-magic-mh-args' on docker-workers")

    # Running via mozharness assumes an image that contains build.sh:
    # by default, debian7-amd64-build, but it could be another image (like
    # android-build).
    taskdesc['worker'].setdefault('docker-image',
                                  {'in-tree': 'debian7-amd64-build'})

    taskdesc['worker'].setdefault('artifacts', []).append({
        'name': 'public/logs',
        'path': '{workdir}/logs/'.format(**run),
        'type': 'directory'
    })
    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')
    docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(
        config, job, taskdesc, extra=run.get('extra-workspace-cache-key'))
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'GECKO_PATH': '{workdir}/workspace/build/src'.format(**run),
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MOZ_SOURCE_CHANGESET': env['GECKO_HEAD_REV'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
        'PYTHONUNBUFFERED': '1',
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'config-paths' in run:
        env['MOZHARNESS_CONFIG_PATHS'] = ' '.join(run['config-paths'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'extra-config' in run:
        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run['extra-config'])

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    if run['comm-checkout']:
        env['MOZ_SOURCE_CHANGESET'] = env['COMM_HEAD_REV']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory.  This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = [4]

    docker_worker_setup_secrets(config, job, taskdesc)

    command = [
        '{workdir}/bin/run-task'.format(**run),
        '--gecko-checkout',
        env['GECKO_PATH'],
    ]
    if run['comm-checkout']:
        command.append(
            '--comm-checkout={workdir}/workspace/build/src/comm'.format(**run))

    command += [
        '--',
        '{workdir}/workspace/build/src/{script}'.format(
            workdir=run['workdir'],
            script=run.get('job-script',
                           'taskcluster/scripts/builder/build-linux.sh'),
        ),
    ]

    worker['command'] = command
Example #24
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    if not run['use-simple-package']:
        raise NotImplementedError("Simple packaging cannot be disabled via "
                                  "'use-simple-package' on docker-workers")
    if not run['use-magic-mh-args']:
        raise NotImplementedError("Cannot disable mh magic arg passing via "
                                  "'use-magic-mh-args' on docker-workers")

    # Running via mozharness assumes an image that contains build.sh:
    # by default, desktop-build, but it could be another image (like
    # android-gradle-build) that "inherits" from desktop-build.
    if not taskdesc['worker']['docker-image']:
        taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}

    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(
        config, job, taskdesc, extra=run.get('extra-workspace-cache-key'))
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory.  This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = 4

    docker_worker_setup_secrets(config, job, taskdesc)

    command = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout',
        '/builds/worker/workspace/build/src',
        '--tools-checkout',
        '/builds/worker/workspace/build/tools',
        '--',
        '/builds/worker/workspace/build/src/{}'.format(
            run.get('job-script',
                    'taskcluster/scripts/builder/build-linux.sh')),
    ]

    worker['command'] = command
Example #25
def windows_toolchain(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']

    worker['artifacts'] = [{
        'path': r'public\build',
        'type': 'directory',
    }]
    worker['chain-of-trust'] = True

    # There were no caches on generic-worker before bug 1519472, and they cause
    # all sorts of problems with toolchain tasks, disable them until
    # tasks are ready.
    run['use-caches'] = False
    support_vcs_checkout(config, job, taskdesc, sparse=('sparse-profile' in run))

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    sparse_profile = run.get('sparse-profile')
    if sparse_profile:
        sparse_profile = 'build/sparse-profiles/{}'.format(run['sparse-profile'])

    hg_command = generic_worker_hg_commands(
        'https://hg.mozilla.org/mozilla-unified',
        env['GECKO_HEAD_REPOSITORY'],
        env['GECKO_HEAD_REV'],
        r'.\build\src', sparse_profile=sparse_profile)[0]

    # Python scripts are normally run via `mach` so in-tree libraries are
    # available, but that isn't supported on the Windows workers yet.
    if run['script'].endswith('.py'):
        raise NotImplementedError("Python scripts don't work on Windows")

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    bash = r'c:\mozilla-build\msys\bin\bash'
    worker['command'] = [
        hg_command,
        # do something intelligent.
        r'{} build/src/taskcluster/scripts/misc/{}{}'.format(
            bash, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        taskdesc['cache'] = {
            'type': CACHE_TYPE,
            'name': name,
            'digest-data': get_digest_data(config, run, taskdesc),
        }
Example #26
def docker_worker_toolchain(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['chain-of-trust'] = True

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(
            artifact.get('name') == 'public/build' for artifact in artifacts):
        docker_worker_add_public_artifacts(config, job, taskdesc)

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc, sparse=True)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
    })

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        wrapper = 'workspace/build/src/mach python '
    else:
        wrapper = ''

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    sparse_profile = []
    if run.get('sparse-profile'):
        sparse_profile = [
            '--sparse-profile',
            'build/sparse-profiles/{}'.format(run['sparse-profile'])
        ]

    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout=/builds/worker/workspace/build/src',
    ] + sparse_profile + [
        '--',
        'bash',
        '-c',
        'cd /builds/worker && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            wrapper, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config,
            taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
Example #27
def docker_worker_toolchain(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['chain-of-trust'] = True

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(artifact.get('name') == 'public/build' for artifact in artifacts):
        docker_worker_add_public_artifacts(config, job, taskdesc)

    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc, sparse=True)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
    })

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        wrapper = 'workspace/build/src/mach python '
    else:
        wrapper = ''

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    sparse_profile = []
    if run.get('sparse-profile'):
        sparse_profile = ['--sparse-profile',
                          'build/sparse-profiles/{}'.format(run['sparse-profile'])]

    worker['command'] = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout=/builds/worker/workspace/build/src',
    ] + sparse_profile + [
        '--',
        'bash',
        '-c',
        'cd /builds/worker && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            wrapper, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        add_optimization(
            config, taskdesc,
            cache_type=CACHE_TYPE,
            cache_name=name,
            digest_data=get_digest_data(config, run, taskdesc),
        )
Example #28
def docker_worker_toolchain(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['chain-of-trust'] = True

    # If the task doesn't have a docker-image, set a default
    worker.setdefault('docker-image', {'in-tree': 'toolchain-build'})

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(artifact.get('name') == 'public/build' for artifact in artifacts):
        docker_worker_add_artifacts(config, job, taskdesc)

    support_vcs_checkout(config, job, taskdesc, sparse=True)

    # Toolchain checkouts don't live under {workdir}/checkouts
    workspace = '{workdir}/workspace/build'.format(**run)
    gecko_path = '{}/src'.format(workspace)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
        'MOZ_FETCHES_DIR': workspace,
        'GECKO_PATH': gecko_path,
    })

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        wrapper = '{}/mach python '.format(gecko_path)
    else:
        wrapper = ''

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    sparse_profile = []
    if run.get('sparse-profile'):
        sparse_profile = ['--gecko-sparse-profile=build/sparse-profiles/{}'
                          .format(run['sparse-profile'])]

    worker['command'] = [
        '{workdir}/bin/run-task'.format(**run),
        '--gecko-checkout={}'.format(gecko_path),
    ] + sparse_profile + [
        '--',
        'bash',
        '-c',
        'cd {} && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            run['workdir'], wrapper, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        taskdesc['cache'] = {
            'type': CACHE_TYPE,
            'name': name,
            'digest-data': get_digest_data(config, run, taskdesc),
        }
Example #29
def mozharness_test_on_docker(config, job, taskdesc):
    run = job['run']
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker']

    # apply some defaults
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']
    if 'android-em-7.0-x86' in test['test-platform']:
        worker['privileged'] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "{workdir}/workspace/logs/".format(**run)),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        ("public/test_info/",
         "{workdir}/workspace/build/blobber_upload_dir/".format(**run)),
    ]

    if 'installer-url' in mozharness:
        installer_url = mozharness['installer-url']
    else:
        installer_url = get_artifact_url('<build>',
                                         mozharness['build-artifact-name'])

    mozharness_url = get_artifact_url(
        '<build>', get_artifact_path(taskdesc, 'mozharness.zip'))

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('{workdir}/workspace'.format(**run), path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {
            'task-reference': installer_url
        },
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
        'NEED_COMPIZ': 'true',
        'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
        'MOZ_AUTOMATION': '1',
        'WORKING_DIR': '/builds/worker',
    })

    # By default, require compiz unless proven otherwise, hence a whitelist.
    # See https://bugzilla.mozilla.org/show_bug.cgi?id=1552563
    # Using regexes, this list could be shortened greatly.
    suites_not_need_compiz = [
        'mochitest-webgl1-core', 'mochitest-webgl1-ext', 'mochitest-plain-gpu',
        'mochitest-browser-chrome-screenshots', 'gtest', 'cppunittest',
        'jsreftest', 'crashtest', 'reftest', 'reftest-no-accel',
        'web-platform-tests', 'web-platform-tests-reftests', 'xpcshell'
    ]
    if job['run']['test']['suite'] in suites_not_need_compiz or (
            job['run']['test']['suite'] == 'mochitest-plain-chunked'
            and job['run']['test']['try-name'] == 'mochitest-plain-headless'):
        env['NEED_COMPIZ'] = 'false'

    if mozharness.get('mochitest-flavor'):
        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options

    if mozharness['tooltool-downloads']:
        internal = mozharness['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    if test['reboot']:
        raise Exception('reboot: {} not supported on docker-worker'.format(
            test['reboot']))

    # assemble the command line
    command = [
        '{workdir}/bin/run-task'.format(**run),
    ]

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        command.extend(
            ['--gecko-checkout', '{workdir}/checkouts/gecko'.format(**run)])
        env['MOZHARNESS_PATH'] = '{workdir}/checkouts/gecko/testing/mozharness'.format(
            **run)
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    extra_config = {
        'installer_url': installer_url,
        'test_packages_url': test_packages_url(taskdesc),
    }
    env['EXTRA_MOZHARNESS_CONFIG'] = {
        'task-reference': json.dumps(extra_config)
    }

    command.extend([
        '--',
        '{workdir}/bin/test-linux.sh'.format(**run),
    ])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace(
                '<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        download_symbols = {
            True: 'true',
            False: 'false'
        }.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    worker['command'] = command
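
A worked sketch (sample data, not from the source) of the 'test-suite-suffix' chunking branch above, which appends the chunk suffix to whichever argument starts with --test-suite:

# Sample data illustrating the 'test-suite-suffix' branch above.
command = ['--test-suite=mochitest', '--some-other-flag']
chunk_suffix = '-<CHUNK>'   # mozharness['chunk-suffix'] in the example
this_chunk = 2              # test['this-chunk'] in the example

suffix = chunk_suffix.replace('<CHUNK>', str(this_chunk))
for i, c in enumerate(command):
    if c.startswith('--test-suite'):
        command[i] += suffix

assert command[0] == '--test-suite=mochitest-2'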
Example #30
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    # running via mozharness assumes desktop-build (which contains build.sh)
    taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}

    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory.  This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    # tooltool downloads
    if run['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        worker.setdefault('caches', []).append({
            'type': 'persistent',
            'name': 'tooltool-cache',
            'mount-point': '/home/worker/tooltool-cache',
        })
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
        if run['tooltool-downloads'] == 'internal':
            taskdesc['scopes'].append(
                'docker-worker:relengapi-proxy:tooltool.download.internal')
        env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = 4

    docker_worker_setup_secrets(config, job, taskdesc)

    command = [
        '/home/worker/bin/run-task',
        # Various caches/volumes are owned by root:root by default.
        '--chown-recursive', '/home/worker/workspace',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--vcs-checkout', '/home/worker/workspace/build/src',
        '--tools-checkout', '/home/worker/workspace/build/tools',
        '--',
    ]
    command.append("/home/worker/workspace/build/src/{}".format(
        run.get('job-script',
                "taskcluster/scripts/builder/build-linux.sh"
                )))

    worker['command'] = command
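The tooltool block above grants the public download scope whenever downloads are enabled, and stacks the internal scope on top when tooltool-downloads is 'internal'. A self-contained sketch of that scope selection (the helper name is illustrative, not part of the transform module):

# Illustrative helper mirroring the scope logic above.
def tooltool_scopes(tooltool_downloads):
    scopes = []
    if tooltool_downloads:
        scopes.append('docker-worker:relengapi-proxy:tooltool.download.public')
        if tooltool_downloads == 'internal':
            scopes.append('docker-worker:relengapi-proxy:tooltool.download.internal')
    return scopes

assert tooltool_scopes(False) == []
assert len(tooltool_scopes('internal')) == 2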
Example #31
def mozharness_test_on_docker(config, job, taskdesc):
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker']

    # apply some defaults
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "/builds/worker/workspace/build/upload/logs/"),
        ("public/test", "/builds/worker/artifacts/"),
        ("public/test_info/", "/builds/worker/workspace/build/blobber_upload_dir/"),
    ]

    installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
    mozharness_url = get_artifact_url('<build>',
                                      'public/build/mozharness.zip')

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('/builds/worker/workspace', path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    worker['caches'] = [{
        'type': 'persistent',
        'name': 'level-{}-{}-test-workspace'.format(
            config.params['level'], config.params['project']),
        'mount-point': "/builds/worker/workspace",
    }]

    env = worker['env'] = {
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
        'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
        'MOZ_AUTOMATION': '1',
    }

    if mozharness.get('mochitest-flavor'):
        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options

    if mozharness['tooltool-downloads']:
        docker_worker_add_tooltool(config, job, taskdesc, internal=True)

    if test['reboot']:
        raise Exception('reboot: {} not supported on docker-worker'.format(test['reboot']))

    # assemble the command line
    command = [
        '/builds/worker/bin/run-task',
    ]

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        command.extend(['--vcs-checkout', '/builds/worker/checkouts/gecko'])
        env['MOZHARNESS_PATH'] = '/builds/worker/checkouts/gecko/testing/mozharness'
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    command.extend([
        '--',
        '/builds/worker/bin/test-linux.sh',
    ])

    if mozharness.get('no-read-buildbot-config'):
        command.append("--no-read-buildbot-config")
    command.extend([
        {"task-reference": "--installer-url=" + installer_url},
        {"task-reference": "--test-packages-url=" + test_packages_url(taskdesc)},
    ])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    worker['command'] = command
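Several command entries above are wrapped as {'task-reference': ...} dicts: they stay symbolic until the task graph knows the concrete task ID behind the <build> dependency. A hedged sketch of how such entries might be resolved later (resolve_task_references and its inputs are assumptions for illustration, not the real taskgraph API):

import re

# Hypothetical resolver: replaces <name> placeholders in task-reference
# entries with concrete task IDs from a dependency mapping.
def resolve_task_references(command, dependencies):
    resolved = []
    for entry in command:
        if isinstance(entry, dict) and 'task-reference' in entry:
            resolved.append(re.sub(r'<([^>]+)>',
                                   lambda m: dependencies[m.group(1)],
                                   entry['task-reference']))
        else:
            resolved.append(entry)
    return resolved

resolve_task_references(
    [{'task-reference': '--installer-url=https://queue.example/<build>/target.tar.bz2'}],
    {'build': 'abc123TaskId'})
# ['--installer-url=https://queue.example/abc123TaskId/target.tar.bz2']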
Example #32
def mozharness_test_on_docker(config, job, taskdesc):
    run = job["run"]
    test = taskdesc["run"]["test"]
    mozharness = test["mozharness"]
    worker = taskdesc["worker"] = job["worker"]

    # apply some defaults
    worker["docker-image"] = test["docker-image"]
    worker["allow-ptrace"] = True  # required for all tests, for crashreporter
    worker["loopback-video"] = test["loopback-video"]
    worker["loopback-audio"] = test["loopback-audio"]
    worker["max-run-time"] = test["max-run-time"]
    worker["retry-exit-status"] = test["retry-exit-status"]
    if "android-em-7.0-x86" in test["test-platform"]:
        worker["privileged"] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "{workdir}/workspace/logs/".format(**run)),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        (
            "public/test_info/",
            "{workdir}/workspace/build/blobber_upload_dir/".format(**run),
        ),
    ]

    installer = installer_url(taskdesc)

    mozharness_url = get_artifact_url(
        "<build>", get_artifact_path(taskdesc, "mozharness.zip"))

    worker.setdefault("artifacts", [])
    worker["artifacts"].extend([{
        "name":
        prefix,
        "path":
        os.path.join("{workdir}/workspace".format(**run), path),
        "type":
        "directory",
    } for (prefix, path) in artifacts])

    env = worker.setdefault("env", {})
    env.update({
        "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
        "MOZHARNESS_SCRIPT": mozharness["script"],
        "MOZILLA_BUILD_URL": {
            "task-reference": installer
        },
        "NEED_PULSEAUDIO": "true",
        "NEED_WINDOW_MANAGER": "true",
        "ENABLE_E10S": text_type(bool(test.get("e10s"))).lower(),
        "WORKING_DIR": "/builds/worker",
    })

    if test.get("python-3"):
        env["PYTHON"] = "python3"

    # Legacy linux64 tests rely on compiz.
    if test.get("docker-image", {}).get("in-tree") == "desktop1604-test":
        env.update({"NEED_COMPIZ": "true"})

    # Bug 1602701/1601828 - use compiz on ubuntu1804 due to GTK asynchiness
    # when manipulating windows.
    if test.get("docker-image", {}).get("in-tree") == "ubuntu1804-test":
        if "wdspec" in job["run"]["test"]["suite"] or (
                "marionette" in job["run"]["test"]["suite"]
                and "headless" not in job["label"]):
            env.update({"NEED_COMPIZ": "true"})

    if mozharness.get("mochitest-flavor"):
        env["MOCHITEST_FLAVOR"] = mozharness["mochitest-flavor"]

    if mozharness["set-moz-node-path"]:
        env["MOZ_NODE_PATH"] = "/usr/local/bin/node"

    if "actions" in mozharness:
        env["MOZHARNESS_ACTIONS"] = " ".join(mozharness["actions"])

    if config.params.is_try():
        env["TRY_COMMIT_MSG"] = config.params["message"]

    # handle some of the mozharness-specific options
    if test["reboot"]:
        raise Exception("reboot: {} not supported on generic-worker".format(
            test["reboot"]))

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test["checkout"]:
        env["MOZHARNESS_PATH"] = "{workdir}/checkouts/gecko/testing/mozharness".format(
            **run)
    else:
        env["MOZHARNESS_URL"] = {"task-reference": mozharness_url}

    extra_config = {
        "installer_url": installer,
        "test_packages_url": test_packages_url(taskdesc),
    }
    env["EXTRA_MOZHARNESS_CONFIG"] = {
        "task-reference":
        six.ensure_text(json.dumps(extra_config, sort_keys=True))
    }

    # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
    # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
    if "web-platform-tests" in test["suite"] or re.match(
            "test-(coverage|verify)-wpt", test["suite"]):
        env["TESTS_BY_MANIFEST_URL"] = {
            "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
        }

    command = [
        "{workdir}/bin/test-linux.sh".format(**run),
    ]
    command.extend(mozharness.get("extra-options", []))

    if test.get("test-manifests"):
        env["MOZHARNESS_TEST_PATHS"] = six.ensure_text(
            json.dumps({test["suite"]: test["test-manifests"]},
                       sort_keys=True))

    # TODO: remove the need for run['chunked']
    elif mozharness.get("chunked") or test["chunks"] > 1:
        command.append("--total-chunk={}".format(test["chunks"]))
        command.append("--this-chunk={}".format(test["this-chunk"]))

    if "download-symbols" in mozharness:
        download_symbols = mozharness["download-symbols"]
        download_symbols = {
            True: "true",
            False: "false"
        }.get(download_symbols, download_symbols)
        command.append("--download-symbols=" + download_symbols)

    job["run"] = {
        "workdir": run["workdir"],
        "tooltool-downloads": mozharness["tooltool-downloads"],
        "checkout": test["checkout"],
        "command": command,
        "using": "run-task",
    }
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
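Unlike the earlier variants, this version does not build the worker command itself: it rewrites job['run'] to using: run-task and defers to configure_taskdesc_for_run, which dispatches on the worker implementation. A simplified sketch of that dispatch pattern (the registry shape here is an assumption; the real run_job_using machinery also handles schemas and defaults):

# Minimal dispatch sketch, not the real registry implementation.
_registry = {}

def run_job_using(worker_implementation, run_using):
    def wrap(func):
        _registry[(worker_implementation, run_using)] = func
        return func
    return wrap

def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation):
    handler = _registry[(worker_implementation, job['run']['using'])]
    handler(config, job, taskdesc)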
Example #33
def common_setup(config, job, taskdesc):
    run = job['run']
    if run['checkout']:
        support_vcs_checkout(config, job, taskdesc)
Example #34
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    if not run['use-simple-package']:
        raise NotImplementedError("Simple packaging cannot be disabled via "
                                  "'use-simple-package' on docker-workers")
    if not run['use-magic-mh-args']:
        raise NotImplementedError("Cannot disable mh magic arg passing via "
                                  "'use-magic-mh-args' on docker-workers")

    # Running via mozharness assumes an image that contains build.sh:
    # by default, debian7-amd64-build, but it could be another image (like
    # android-build).
    taskdesc['worker'].setdefault('docker-image', {'in-tree': 'debian7-amd64-build'})

    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc,
                                      extra=run.get('extra-workspace-cache-key'))
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'config-paths' in run:
        env['MOZHARNESS_CONFIG_PATHS'] = ' '.join(run['config-paths'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'extra-config' in run:
        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run['extra-config'])

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory.  This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        docker_worker_add_tooltool(config, job, taskdesc, internal=internal)

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = 4

    docker_worker_setup_secrets(config, job, taskdesc)

    command = [
        '/builds/worker/bin/run-task',
        '--vcs-checkout', '/builds/worker/workspace/build/src',
        '--tools-checkout', '/builds/worker/workspace/build/tools',
    ]
    if run['comm-checkout']:
        command.append('--comm-checkout=/builds/worker/workspace/build/src/comm')

    command += [
        '--',
        '/builds/worker/workspace/build/src/{}'.format(
            run.get('job-script', 'taskcluster/scripts/builder/build-linux.sh')
        ),
    ]

    worker['command'] = command
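For a concrete feel of what the assembly above produces, here is a worked illustration with hypothetical run values (comm-checkout enabled, no custom job-script):

# Hypothetical result of the command assembly above.
expected = [
    '/builds/worker/bin/run-task',
    '--vcs-checkout', '/builds/worker/workspace/build/src',
    '--tools-checkout', '/builds/worker/workspace/build/tools',
    '--comm-checkout=/builds/worker/workspace/build/src/comm',
    '--',
    '/builds/worker/workspace/build/src/taskcluster/scripts/builder/build-linux.sh',
]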
Example #35
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    # running via mozharness assumes desktop-build (which contains build.sh)
    taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}

    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')

    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory.  This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'

    # tooltool downloads
    if run['tooltool-downloads']:
        worker['relengapi-proxy'] = True
        worker['caches'].append({
            'type': 'persistent',
            'name': 'tooltool-cache',
            'mount-point': '/home/worker/tooltool-cache',
        })
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])
        if run['tooltool-downloads'] == 'internal':
            taskdesc['scopes'].append(
                'docker-worker:relengapi-proxy:tooltool.download.internal')
        env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = 4

    docker_worker_setup_secrets(config, job, taskdesc)

    command = [
        '/home/worker/bin/run-task',
        # Various caches/volumes are default owned by root:root.
        '--chown-recursive', '/home/worker/workspace',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--vcs-checkout', '/home/worker/workspace/build/src',
        '--tools-checkout', '/home/worker/workspace/build/tools',
        '--',
    ]
    command.append("/home/worker/workspace/build/src/{}".format(
        run.get('job-script', "taskcluster/scripts/builder/build-linux.sh")))

    worker['command'] = command
Example #36
def mozharness_test_on_docker(config, job, taskdesc):
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker']

    # apply some defaults
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "/builds/worker/workspace/build/upload/logs/"),
        ("public/test", "/builds/worker/artifacts/"),
        ("public/test_info/",
         "/builds/worker/workspace/build/blobber_upload_dir/"),
    ]

    installer_url = get_artifact_url('<build>',
                                     mozharness['build-artifact-name'])
    mozharness_url = get_artifact_url('<build>', 'public/build/mozharness.zip')

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('/builds/worker/workspace', path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    worker['caches'] = [{
        'type': 'persistent',
        'name': 'level-{}-{}-test-workspace'.format(
            config.params['level'], config.params['project']),
        'mount-point': "/builds/worker/workspace",
    }]

    env = worker['env'] = {
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
        'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
        'MOZ_AUTOMATION': '1',
    }

    if mozharness.get('mochitest-flavor'):
        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params['project'] == 'try':
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options

    if mozharness['tooltool-downloads']:
        docker_worker_add_tooltool(config, job, taskdesc, internal=True)

    if test['reboot']:
        raise Exception('reboot: {} not supported on docker-worker'.format(
            test['reboot']))

    # assemble the command line
    command = [
        '/builds/worker/bin/run-task',
    ]

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        command.extend(['--vcs-checkout', '/builds/worker/checkouts/gecko'])
        env['MOZHARNESS_PATH'] = '/builds/worker/checkouts/gecko/testing/mozharness'
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    command.extend([
        '--',
        '/builds/worker/bin/test-linux.sh',
    ])

    if mozharness.get('no-read-buildbot-config'):
        command.append("--no-read-buildbot-config")
    command.extend([
        {"task-reference": "--installer-url=" + installer_url},
        {"task-reference": "--test-packages-url=" + test_packages_url(taskdesc)},
    ])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace(
                '<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        download_symbols = {
            True: 'true',
            False: 'false'
        }.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    worker['command'] = command
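The test-suite-suffix chunking mode above edits an existing --test-suite option in place instead of appending chunk flags. A standalone Python 3 illustration with hypothetical values:

# Hypothetical command and chunk suffix, mirroring the in-place edit above.
command = ['/builds/worker/bin/test-linux.sh', '--test-suite=mochitest']
suffix = '-<CHUNK>'.replace('<CHUNK>', str(3))
command = [c + suffix if isinstance(c, str) and c.startswith('--test-suite') else c
           for c in command]
# command == ['/builds/worker/bin/test-linux.sh', '--test-suite=mochitest-3']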
Example #37
def mozharness_test_on_docker(config, job, taskdesc):
    run = job['run']
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker'] = job['worker']

    # apply some defaults
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']
    if 'android-em-7.0-x86' in test['test-platform']:
        worker['privileged'] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "{workdir}/workspace/logs/".format(**run)),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        ("public/test_info/",
         "{workdir}/workspace/build/blobber_upload_dir/".format(**run)),
    ]

    if 'installer-url' in mozharness:
        installer_url = mozharness['installer-url']
    else:
        installer_url = get_artifact_url('<build>',
                                         mozharness['build-artifact-name'])

    mozharness_url = get_artifact_url(
        '<build>', get_artifact_path(taskdesc, 'mozharness.zip'))

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('{workdir}/workspace'.format(**run), path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
        'NEED_COMPIZ': 'true',
        'ENABLE_E10S': text_type(bool(test.get('e10s'))).lower(),
        'WORKING_DIR': '/builds/worker',
    })

    if mozharness.get('mochitest-flavor'):
        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options
    if test['reboot']:
        raise Exception('reboot: {} not supported on docker-worker'.format(
            test['reboot']))

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        env['MOZHARNESS_PATH'] = '{workdir}/checkouts/gecko/testing/mozharness'.format(
            **run)
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    extra_config = {
        'installer_url': installer_url,
        'test_packages_url': test_packages_url(taskdesc),
    }
    env['EXTRA_MOZHARNESS_CONFIG'] = {
        'task-reference': six.ensure_text(json.dumps(extra_config))
    }

    command = [
        '{workdir}/bin/test-linux.sh'.format(**run),
    ]
    command.extend(mozharness.get('extra-options', []))

    if test.get('test-manifests'):
        env['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
            json.dumps({test['suite']: test['test-manifests']}))

    # TODO: remove the need for run['chunked']
    elif mozharness.get('chunked') or test['chunks'] > 1:
        command.append('--total-chunk={}'.format(test['chunks']))
        command.append('--this-chunk={}'.format(test['this-chunk']))

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        download_symbols = {
            True: 'true',
            False: 'false'
        }.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    job['run'] = {
        'workdir': run['workdir'],
        'tooltool-downloads': mozharness['tooltool-downloads'],
        'checkout': test['checkout'],
        'command': command,
        'using': 'run-task',
    }
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
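On the consuming side, mozharness reads EXTRA_MOZHARNESS_CONFIG back out of the environment as JSON; the reader below is an illustrative sketch, with only the env-var contract coming from the transform above:

import json
import os

# Illustrative consumer of the JSON payload written by the transform.
extra = json.loads(os.environ.get('EXTRA_MOZHARNESS_CONFIG', '{}'))
installer_url = extra.get('installer_url')
test_packages_url = extra.get('test_packages_url')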