Example #1
def docker_worker_hazard(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []

    docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    add_tooltool(config, job, taskdesc)
    setup_secrets(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # script parameters
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run['mozconfig']

    # build-haz-linux.sh needs this; otherwise it assumes the checkout is in
    # the workspace.
    env['GECKO_DIR'] = '{workdir}/checkouts/gecko'.format(**run)

    worker['command'] = [
        '{workdir}/bin/run-task'.format(**run), '--gecko-checkout',
        '{workdir}/checkouts/gecko'.format(**run), '--', '/bin/bash', '-c',
        run['command']
    ]
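
For reference, a minimal standalone sketch of the run-task wrapper the transform above assembles (the `run` values here are hypothetical; real values come from the job description):

run = {'workdir': '/builds/worker', 'command': 'build-haz-linux.sh --project browser'}  # hypothetical
command = [
    '{workdir}/bin/run-task'.format(**run), '--gecko-checkout',
    '{workdir}/checkouts/gecko'.format(**run), '--', '/bin/bash', '-c',
    run['command'],
]
assert command[0] == '/builds/worker/bin/run-task'
print(command)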
Example #2
def docker_worker_run_task(config, job, taskdesc):
    run = job['run']
    worker = taskdesc['worker'] = job['worker']
    command = ['/builds/worker/bin/run-task']
    common_setup(config, job, taskdesc, command)

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    if run.get('cache-dotcache'):
        worker['caches'].append({
            'type': 'persistent',
            'name': '{project}-dotcache'.format(**config.params),
            'mount-point': '{workdir}/.cache'.format(**run),
            'skip-untrusted': True,
        })

    run_command = run['command']
    # dict is for the case of `{'task-reference': basestring}`.
    if isinstance(run_command, (basestring, dict)):
        run_command = ['bash', '-cx', run_command]
    if run['comm-checkout']:
        command.append(
            '--comm-checkout={workdir}/checkouts/gecko/comm'.format(**run))
    command.append('--fetch-hgfingerprint')
    command.append('--')
    command.extend(run_command)
    worker['command'] = command
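
A minimal sketch of the command wrapping above (hypothetical command value; Python 3 `str` stands in for `basestring`): a bare string or a `{'task-reference': ...}` dict is handed to `bash -cx` and appended after the `--` separator:

run_command = {'task-reference': './mach build'}  # hypothetical
if isinstance(run_command, (str, dict)):
    run_command = ['bash', '-cx', run_command]
command = ['/builds/worker/bin/run-task', '--fetch-hgfingerprint', '--']
command.extend(run_command)
print(command)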
Example #3
def docker_worker_hazard(config, job, taskdesc):
    run = job["run"]

    worker = taskdesc["worker"] = job["worker"]
    worker.setdefault("artifacts", [])

    docker_worker_add_artifacts(config, job, taskdesc)
    worker.setdefault("required-volumes",
                      []).append("{workdir}/workspace".format(**run))
    add_tooltool(config, job, taskdesc)
    setup_secrets(config, job, taskdesc)

    env = worker["env"]
    env.update({
        "MOZ_BUILD_DATE": config.params["moz_build_date"],
        "MOZ_SCM_LEVEL": config.params["level"],
    })

    # script parameters
    if run.get("mozconfig"):
        env["MOZCONFIG"] = run.pop("mozconfig")

    run["using"] = "run-task"
    run["cwd"] = run["workdir"]
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
Example #4
def docker_worker_hazard(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker'] = job['worker']
    worker['artifacts'] = []

    docker_worker_add_artifacts(config, job, taskdesc)
    worker.setdefault('required-volumes',
                      []).append('{workdir}/workspace'.format(**run))
    add_tooltool(config, job, taskdesc)
    setup_secrets(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })

    # script parameters
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run.pop('mozconfig')

    run['using'] = 'run-task'
    run['cwd'] = run['workdir']
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
Example #5
def generic_worker_run_task(config, job, taskdesc):
    run = job['run']
    worker = taskdesc['worker'] = job['worker']
    is_win = worker['os'] == 'windows'
    is_mac = worker['os'] == 'macosx'

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    if is_win:
        command = ['C:/mozilla-build/python3/python3.exe', 'run-task']
    elif is_mac:
        command = ['/tools/python37/bin/python3.7', 'run-task']
        if job['worker-type'].endswith('1014'):
            command = ['/usr/local/bin/python3', 'run-task']
    else:
        command = ['./run-task']

    common_setup(config, job, taskdesc, command)

    worker.setdefault('mounts', [])
    if run.get('cache-dotcache'):
        worker['mounts'].append({
            'cache-name': '{project}-dotcache'.format(**config.params),
            'directory': '{workdir}/.cache'.format(**run),
        })
    worker['mounts'].append({
        'content': {
            'url': script_url(config, 'run-task'),
        },
        'file': './run-task',
    })
    if worker.get('env', {}).get('MOZ_FETCHES'):
        worker['mounts'].append({
            'content': {
                'url': script_url(config, 'misc/fetch-content'),
            },
            'file': './fetch-content',
        })

    run_command = run['command']
    if isinstance(run_command, basestring):
        if is_win:
            run_command = '"{}"'.format(run_command)
        run_command = ['bash', '-cx', run_command]

    command.append('--')
    command.extend(run_command)

    if is_win:
        worker['command'] = [' '.join(command)]
    else:
        worker['command'] = [
            ['chmod', '+x', 'run-task'],
            command,
        ]
Example #6
def docker_worker_spidermonkey(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker.setdefault('caches', []).append({
        'type': 'persistent',
        'name': '{}-build-spidermonkey-workspace'.format(config.params['project']),
        'mount-point': '{workdir}/workspace'.format(**run),
        'skip-untrusted': True,
    })

    docker_worker_add_artifacts(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
    })
    if 'spidermonkey-platform' in run:
        env['SPIDERMONKEY_PLATFORM'] = run['spidermonkey-platform']

    support_vcs_checkout(config, job, taskdesc)

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    worker['command'] = [
        '{workdir}/bin/run-task'.format(**run), '--gecko-checkout',
        '{workdir}/workspace/build/src'.format(**run), '--', '/bin/bash', '-c',
        'cd {workdir} && workspace/build/src/taskcluster/scripts/builder/{script}'
        .format(workdir=run['workdir'], script=script)
    ]
Example #7
def docker_worker_run_task(config, job, taskdesc):
    run = job['run']
    worker = taskdesc['worker'] = job['worker']
    command = ['/builds/worker/bin/run-task']
    common_setup(config, job, taskdesc, command)

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    if run.get('cache-dotcache'):
        worker['caches'].append({
            'type': 'persistent',
            'name': '{project}-dotcache'.format(**config.params),
            'mount-point': '{workdir}/.cache'.format(**run),
            'skip-untrusted': True,
        })

    run_command = run['command']
    run_cwd = run.get('cwd')
    if run_cwd and run['checkout']:
        run_cwd = path.normpath(
            run_cwd.format(checkout=taskdesc['worker']['env']['GECKO_PATH']))
    elif run_cwd and "{checkout}" in run_cwd:
        raise Exception(
            "Found `{{checkout}}` interpolation in `cwd` for task {name} "
            "but the task doesn't have a checkout: {cwd}".format(
                cwd=run_cwd, name=job.get("name", job.get("label"))))

    # dict is for the case of `{'task-reference': text_type}`.
    if isinstance(run_command, (text_type, dict)):
        run_command = ['bash', '-cx', run_command]
    if run['comm-checkout']:
        command.append('--comm-checkout={}/comm'.format(
            taskdesc['worker']['env']['GECKO_PATH']))
    command.append('--fetch-hgfingerprint')
    if run['run-as-root']:
        command.extend(('--user', 'root', '--group', 'root'))
    if run_cwd:
        command.extend(('--task-cwd', run_cwd))
    command.append('--')
    command.extend(run_command)
    worker['command'] = command
Example #8
def docker_worker_run_task(config, job, taskdesc):
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]
    command = ["/builds/worker/bin/run-task"]
    common_setup(config, job, taskdesc, command)

    if run["tooltool-downloads"]:
        internal = run["tooltool-downloads"] == "internal"
        add_tooltool(config, job, taskdesc, internal=internal)

    if run.get("cache-dotcache"):
        worker["caches"].append({
            "type":
            "persistent",
            "name":
            "{project}-dotcache".format(**config.params),
            "mount-point":
            "{workdir}/.cache".format(**run),
            "skip-untrusted":
            True,
        })

    run_command = run["command"]
    run_cwd = run.get("cwd")
    if run_cwd and run["checkout"]:
        run_cwd = path.normpath(
            run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"]))
    elif run_cwd and "{checkout}" in run_cwd:
        raise Exception(
            "Found `{{checkout}}` interpolation in `cwd` for task {name} "
            "but the task doesn't have a checkout: {cwd}".format(
                cwd=run_cwd, name=job.get("name", job.get("label"))))

    # dict is for the case of `{'task-reference': text_type}`.
    if isinstance(run_command, (text_type, dict)):
        run_command = ["bash", "-cx", run_command]
    if run["comm-checkout"]:
        command.append("--comm-checkout={}/comm".format(
            taskdesc["worker"]["env"]["GECKO_PATH"]))
    command.append("--fetch-hgfingerprint")
    if run["run-as-root"]:
        command.extend(("--user", "root", "--group", "root"))
    if run_cwd:
        command.extend(("--task-cwd", run_cwd))
    command.append("--")
    command.extend(run_command)
    worker["command"] = command
Example #9
def docker_worker_spidermonkey(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker'] = job['worker']
    worker['artifacts'] = []
    worker.setdefault('caches', []).append({
        'type': 'persistent',
        'name': '{}-build-spidermonkey-workspace'.format(config.params['project']),
        'mount-point': '{workdir}/workspace'.format(**run),
        'skip-untrusted': True,
    })

    docker_worker_add_artifacts(config, job, taskdesc)
    add_tooltool(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run.pop('spidermonkey-variant'),
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'GECKO_PATH': '{}/workspace/build/src'.format(run['workdir'])
    })
    if 'spidermonkey-platform' in run:
        env['SPIDERMONKEY_PLATFORM'] = run.pop('spidermonkey-platform')

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"

    run['using'] = 'run-task'
    run['cwd'] = run['workdir']
    run['command'] = [
        'workspace/build/src/taskcluster/scripts/builder/{script}'.format(
            script=script)
    ]

    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
Example #10
def generic_worker_run_task(config, job, taskdesc):
    run = job['run']
    worker = taskdesc['worker'] = job['worker']
    is_win = worker['os'] == 'windows'
    is_mac = worker['os'] == 'macosx'
    is_bitbar = worker['os'] == 'linux-bitbar'

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    if is_win:
        command = ['C:/mozilla-build/python3/python3.exe', 'run-task']
    elif is_mac:
        command = ['/tools/python37/bin/python3.7', 'run-task']
        if job['worker-type'].endswith(('1014', '1014-pgo')):
            command = ['/usr/local/bin/python3', 'run-task']
    else:
        command = ['./run-task']

    common_setup(config, job, taskdesc, command)

    worker.setdefault('mounts', [])
    if run.get('cache-dotcache'):
        worker['mounts'].append({
            'cache-name': '{project}-dotcache'.format(**config.params),
            'directory': '{workdir}/.cache'.format(**run),
        })
    worker['mounts'].append({
        'content': {
            'url': script_url(config, 'run-task'),
        },
        'file': './run-task',
    })
    if worker.get('env', {}).get('MOZ_FETCHES'):
        worker['mounts'].append({
            'content': {
                'url': script_url(config, 'misc/fetch-content'),
            },
            'file': './fetch-content',
        })

    run_command = run['command']
    run_cwd = run.get('cwd')
    if run_cwd and run['checkout']:
        run_cwd = path.normpath(
            run_cwd.format(checkout=taskdesc['worker']['env']['GECKO_PATH']))
    elif run_cwd and "{checkout}" in run_cwd:
        raise Exception(
            "Found `{{checkout}}` interpolation in `cwd` for task {name} "
            "but the task doesn't have a checkout: {cwd}".format(
                cwd=run_cwd, name=job.get("name", job.get("label"))))

    if isinstance(run_command, text_type):
        if is_win:
            run_command = '"{}"'.format(run_command)
        run_command = ['bash', '-cx', run_command]

    if run['comm-checkout']:
        command.append('--comm-checkout={}/comm'.format(
            taskdesc['worker']['env']['GECKO_PATH']))

    if run['run-as-root']:
        command.extend(('--user', 'root', '--group', 'root'))
    if run_cwd:
        command.extend(('--task-cwd', run_cwd))
    command.append('--')
    if is_bitbar:
        # Use the bitbar wrapper script which sets up the device and adb
        # environment variables
        command.append('/builds/taskcluster/script.py')
    command.extend(run_command)

    if is_win:
        worker['command'] = [' '.join(command)]
    else:
        worker['command'] = [
            ['chmod', '+x', 'run-task'],
            command,
        ]
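
A minimal sketch of the final command shaping above (hypothetical command list): generic-worker on Windows expects a single command string, while the POSIX branch produces a list of commands so run-task can be made executable before it runs:

command = ['./run-task', '--', 'bash', '-cx', 'echo hello']  # hypothetical
is_win = False
if is_win:
    worker_command = [' '.join(command)]
else:
    worker_command = [['chmod', '+x', 'run-task'], command]
print(worker_command)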
Example #11
def generic_worker_spidermonkey(config, job, taskdesc):
    assert job['worker']['os'] == 'windows', 'only supports windows right now'

    run = job['run']

    worker = taskdesc['worker']

    generic_worker_add_artifacts(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_DISABLE': 'true',
        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'SCCACHE_DISABLE': "1",
        'WORK': ".",  # Override the defaults in build scripts
        'SRCDIR': "./src",  # with values suiteable for windows generic worker
        'UPLOAD_DIR': "./public/build"
    })
    if 'spidermonkey-platform' in run:
        env['SPIDERMONKEY_PLATFORM'] = run['spidermonkey-platform']

    script = "build-sm.sh"
    if run['using'] == 'spidermonkey-package':
        script = "build-sm-package.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-package is not a supported configuration")
    elif run['using'] == 'spidermonkey-mozjs-crate':
        script = "build-sm-mozjs-crate.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-mozjs-crate is not a supported configuration")
    elif run['using'] == 'spidermonkey-rust-bindings':
        script = "build-sm-rust-bindings.sh"
        # Don't allow untested configurations yet
        raise Exception(
            "spidermonkey-rust-bindings is not a supported configuration")

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    hg_command = generic_worker_hg_commands(
        'https://hg.mozilla.org/mozilla-unified',
        env['GECKO_HEAD_REPOSITORY'],
        env['GECKO_HEAD_REV'],
        r'.\src',
    )[0]

    command = [
        'c:\\mozilla-build\\msys\\bin\\bash.exe '  # string concat
        '"./src/taskcluster/scripts/builder/%s"' % script
    ]

    worker['command'] = [
        hg_command,
        ' '.join(command),
    ]
Example #12
def windows_toolchain(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']

    worker['artifacts'] = [{
        'path': r'public\build',
        'type': 'directory',
    }]
    worker['chain-of-trust'] = True

    # There were no caches on generic-worker before bug 1519472, and they cause
    # all sorts of problems with toolchain tasks. Disable them until the
    # toolchain tasks are ready to use them.
    run['use-caches'] = False
    support_vcs_checkout(config,
                         job,
                         taskdesc,
                         sparse=('sparse-profile' in run))

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
    })

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    sparse_profile = run.get('sparse-profile')
    if sparse_profile:
        sparse_profile = 'build/sparse-profiles/{}'.format(
            run['sparse-profile'])

    hg_command = generic_worker_hg_commands(
        'https://hg.mozilla.org/mozilla-unified',
        env['GECKO_HEAD_REPOSITORY'],
        env['GECKO_HEAD_REV'],
        r'.\build\src',
        sparse_profile=sparse_profile)[0]

    # Python scripts are normally invoked with `mach` so in-tree libraries are
    # available, but that isn't supported on Windows yet.
    if run['script'].endswith('.py'):
        raise NotImplementedError("Python scripts don't work on Windows")

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    bash = r'c:\mozilla-build\msys\bin\bash'
    worker['command'] = [
        hg_command,
        # Run the toolchain script under msys bash.
        r'{} build/src/taskcluster/scripts/misc/{}{}'.format(
            bash, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        taskdesc['cache'] = {
            'type': CACHE_TYPE,
            'name': name,
            'digest-data': get_digest_data(config, run, taskdesc),
        }
Example #13
def mozharness_on_generic_worker(config, job, taskdesc):
    assert (
        job["worker"]["os"] == "windows"
    ), "only supports windows right now: {}".format(job["label"])

    run = job['run']

    # fail if invalid run options are included
    invalid = []
    for prop in ['need-xvfb']:
        if prop in run and run[prop]:
            invalid.append(prop)
    if not run.get('keep-artifacts', True):
        invalid.append('keep-artifacts')
    if invalid:
        raise Exception("Jobs run using mozharness on Windows do not support properties " +
                        ', '.join(invalid))

    worker = taskdesc['worker']

    worker['taskcluster-proxy'] = run.pop('taskcluster-proxy', None)

    setup_secrets(config, job, taskdesc)

    taskdesc['worker'].setdefault('artifacts', []).append({
        'name': 'public/logs',
        'path': 'logs',
        'type': 'directory'
    })
    if not worker.get('skip-artifacts', False):
        generic_worker_add_artifacts(config, job, taskdesc)
    support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
        'MH_BRANCH': config.params['project'],
        'MOZ_SOURCE_CHANGESET': get_branch_rev(config),
        'MOZ_SOURCE_REPO': get_branch_repo(config),
    })
    if run['use-simple-package']:
        env.update({'MOZ_SIMPLE_PACKAGE_NAME': 'target'})

    if 'extra-config' in run:
        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run['extra-config'])

    # The windows generic worker uses batch files to pass environment variables
    # to commands.  Setting a variable to empty in a batch file unsets it, so
    # if there is no try commit message, pass a non-empty placeholder instead,
    # so that mozharness doesn't try to find the commit message on its own.
    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message'] or 'no commit message'

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    if not job['attributes']['build_platform'].startswith('win'):
        raise Exception(
            "Task generation for mozharness build jobs currently only supported on Windows"
        )

    # TODO We should run the mozharness script with `mach python` so these
    # modules are automatically available, but doing so somehow caused hangs in
    # Windows ccov builds (see bug 1543149).
    gecko = env['GECKO_PATH'].replace('.', '%cd%')
    mozbase_dir = "{}/testing/mozbase".format(gecko)
    env['PYTHONPATH'] = ';'.join([
        "{}/manifestparser".format(mozbase_dir),
        "{}/mozinfo".format(mozbase_dir),
        "{}/mozfile".format(mozbase_dir),
        "{}/mozprocess".format(mozbase_dir),
        "{}/third_party/python/six".format(gecko),
    ])

    mh_command = [
        'c:/mozilla-build/python/python.exe',
        '{}/testing/{}'.format(gecko, run['script']),
    ]

    if 'config-paths' in run:
        for path in run['config-paths']:
            mh_command.append('--extra-config-path {}/{}'.format(gecko, path))

    for cfg in run['config']:
        mh_command.append('--config ' + cfg)
    if run['use-magic-mh-args']:
        mh_command.append('--branch ' + config.params['project'])
    mh_command.append(r'--work-dir %cd:Z:=z:%\build')
    for action in run.get('actions', []):
        mh_command.append('--' + action)

    for option in run.get('options', []):
        mh_command.append('--' + option)
    if run.get('custom-build-variant-cfg'):
        mh_command.append('--custom-build-variant')
        mh_command.append(run['custom-build-variant-cfg'])

    hg_commands = generic_worker_hg_commands(
        base_repo=env['GECKO_BASE_REPOSITORY'],
        head_repo=env['GECKO_HEAD_REPOSITORY'],
        head_rev=env['GECKO_HEAD_REV'],
        path=r'.\build\src',
    )

    if run['comm-checkout']:
        hg_commands.extend(
            generic_worker_hg_commands(
                base_repo=env['COMM_BASE_REPOSITORY'],
                head_repo=env['COMM_HEAD_REPOSITORY'],
                head_rev=env['COMM_HEAD_REV'],
                path=r'.\build\src\comm'))

    fetch_commands = []
    if 'MOZ_FETCHES' in env:
        # When Bug 1436037 is fixed, run-task can be used for this task,
        # and this call can go away
        fetch_commands.append(' '.join([
            r'c:\mozilla-build\python3\python3.exe',
            r'build\src\taskcluster\scripts\misc\fetch-content',
            'task-artifacts',
        ]))

    worker['command'] = []
    if taskdesc.get('needs-sccache'):
        worker['command'].extend([
            # Make the comment part of the first command, as it will help users to
            # understand what is going on, and why these steps are implemented.
            dedent('''\
            :: sccache currently uses the full compiler commandline as input to the
            :: cache hash key, so create a symlink to the task dir and build from
            :: the symlink dir to get consistent paths.
            if exist z:\\build rmdir z:\\build'''),
            r'mklink /d z:\build %cd%',
            # Grant delete permission on the link to everyone.
            r'icacls z:\build /grant *S-1-1-0:D /L',
            r'cd /d z:\build',
        ])

    worker['command'].extend(hg_commands)
    worker['command'].extend(fetch_commands)
    worker['command'].extend([
        ' '.join(mh_command)
    ])
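
A minimal sketch of the try-message guard above (hypothetical `message` value): because setting a variable to empty in a batch file unsets it, a non-empty placeholder is substituted when the push carries no commit message:

message = ''  # hypothetical: the push carried no commit message
env = {}
env['TRY_COMMIT_MSG'] = message or 'no commit message'
assert env['TRY_COMMIT_MSG'] == 'no commit message'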
Example #14
def mozharness_on_docker_worker_setup(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['implementation'] = job['worker']['implementation']

    if not run['use-simple-package']:
        raise NotImplementedError("Simple packaging cannot be disabled via"
                                  "'use-simple-package' on docker-workers")
    if not run['use-magic-mh-args']:
        raise NotImplementedError("Cannot disabled mh magic arg passing via"
                                  "'use-magic-mh-args' on docker-workers")

    # Running via mozharness assumes an image that contains build.sh:
    # by default, debian7-amd64-build, but it could be another image (like
    # android-build).
    taskdesc['worker'].setdefault('docker-image', {'in-tree': 'debian7-amd64-build'})

    taskdesc['worker'].setdefault('artifacts', []).append({
        'name': 'public/logs',
        'path': '{workdir}/logs/'.format(**run),
        'type': 'directory'
    })
    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')
    docker_worker_add_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc,
                                      extra=run.get('extra-workspace-cache-key'))
    support_vcs_checkout(config, job, taskdesc)

    env = worker.setdefault('env', {})
    env.update({
        'GECKO_PATH': '{workdir}/workspace/build/src'.format(**run),
        'MOZHARNESS_CONFIG': ' '.join(run['config']),
        'MOZHARNESS_SCRIPT': run['script'],
        'MH_BRANCH': config.params['project'],
        'MOZ_SOURCE_CHANGESET': get_branch_rev(config),
        'MOZ_SOURCE_REPO': get_branch_repo(config),
        'MH_BUILD_POOL': 'taskcluster',
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'MOZ_AUTOMATION': '1',
        'PYTHONUNBUFFERED': '1',
    })

    if 'actions' in run:
        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])

    if 'options' in run:
        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])

    if 'config-paths' in run:
        env['MOZHARNESS_CONFIG_PATHS'] = ' '.join(run['config-paths'])

    if 'custom-build-variant-cfg' in run:
        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']

    if 'extra-config' in run:
        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run['extra-config'])

    if 'job-script' in run:
        env['JOB_SCRIPT'] = run['job-script']

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # if we're not keeping artifacts, set some env variables to empty values
    # that will cause the build process to skip copying the results to the
    # artifacts directory.  This will have no effect for operations that are
    # not builds.
    if not run['keep-artifacts']:
        env['DIST_TARGET_UPLOADS'] = ''
        env['DIST_UPLOADS'] = ''

    # Xvfb
    if run['need-xvfb']:
        env['NEED_XVFB'] = 'true'
    else:
        env['NEED_XVFB'] = 'false'

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    # Retry if mozharness returns TBPL_RETRY
    worker['retry-exit-status'] = [4]

    setup_secrets(config, job, taskdesc)

    command = [
        '{workdir}/bin/run-task'.format(**run),
        '--gecko-checkout', env['GECKO_PATH'],
    ]
    if run['comm-checkout']:
        command.append('--comm-checkout={workdir}/workspace/build/src/comm'.format(**run))

    command += [
        '--',
        '{workdir}/workspace/build/src/{script}'.format(
            workdir=run['workdir'],
            script=run.get('job-script', 'taskcluster/scripts/builder/build-linux.sh'),
        ),
    ]

    worker['command'] = command
Example #15
def mozharness_test_on_docker(config, job, taskdesc):
    run = job['run']
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker']

    # apply some defaults
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']
    if 'android-em-7.0-x86' in test['test-platform']:
        worker['privileged'] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "{workdir}/workspace/logs/".format(**run)),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        ("public/test_info/",
         "{workdir}/workspace/build/blobber_upload_dir/".format(**run)),
    ]

    if 'installer-url' in mozharness:
        installer_url = mozharness['installer-url']
    else:
        installer_url = get_artifact_url('<build>',
                                         mozharness['build-artifact-name'])

    mozharness_url = get_artifact_url(
        '<build>', get_artifact_path(taskdesc, 'mozharness.zip'))

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('{workdir}/workspace'.format(**run), path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {
            'task-reference': installer_url
        },
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
        'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
        'MOZ_AUTOMATION': '1',
        'WORKING_DIR': '/builds/worker',
    })

    if mozharness.get('mochitest-flavor'):
        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options

    if mozharness['tooltool-downloads']:
        internal = mozharness['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    if test['reboot']:
        raise Exception('reboot: {} not supported on generic-worker'.format(
            test['reboot']))

    # assemble the command line
    command = [
        '{workdir}/bin/run-task'.format(**run),
    ]

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        command.extend(
            ['--gecko-checkout', '{workdir}/checkouts/gecko'.format(**run)])
        env['MOZHARNESS_PATH'] = '{workdir}/checkouts/gecko/testing/mozharness'.format(
            **run)
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    extra_config = {
        'installer_url': installer_url,
        'test_packages_url': test_packages_url(taskdesc),
    }
    env['EXTRA_MOZHARNESS_CONFIG'] = {
        'task-reference': json.dumps(extra_config)
    }

    command.extend([
        '--',
        '{workdir}/bin/test-linux.sh'.format(**run),
    ])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace(
                '<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        download_symbols = {
            True: 'true',
            False: 'false'
        }.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    worker['command'] = command
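
A minimal sketch of the 'test-suite-suffix' chunking branch above (hypothetical values; Python 3 `str` stands in for `basestring`): the chunk number replaces `<CHUNK>` in the configured suffix, which is then spliced onto any `--test-suite` argument already in the command:

command = ['/builds/worker/bin/test-linux.sh', '--test-suite=mochitest']  # hypothetical
chunk_suffix, this_chunk = '-<CHUNK>', 3  # hypothetical mozharness settings
suffix = chunk_suffix.replace('<CHUNK>', str(this_chunk))
for i, c in enumerate(command):
    if isinstance(c, str) and c.startswith('--test-suite'):
        command[i] += suffix
assert command[1] == '--test-suite=mochitest-3'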
Example #16
def mozharness_test_on_generic_worker(config, job, taskdesc):
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker']

    bitbar_script = 'test-linux.sh'
    bitbar_wrapper = '/builds/taskcluster/script.py'

    is_macosx = worker['os'] == 'macosx'
    is_windows = worker['os'] == 'windows'
    is_linux = worker['os'] == 'linux' or worker['os'] == 'linux-bitbar'
    is_bitbar = worker['os'] == 'linux-bitbar'
    assert is_macosx or is_windows or is_linux

    artifacts = [
        {
            'name': 'public/logs',
            'path': 'logs',
            'type': 'directory'
        },
    ]

    # jittest doesn't have blob_upload_dir
    if test['test-name'] != 'jittest':
        artifacts.append({
            'name': 'public/test_info',
            'path': 'build/blobber_upload_dir',
            'type': 'directory'
        })

    if is_bitbar:
        artifacts = [
            {
                'name': 'public/test/',
                'path': 'artifacts/public',
                'type': 'directory'
            },
            {
                'name': 'public/logs/',
                'path': 'workspace/logs',
                'type': 'directory'
            },
            {
                'name': 'public/test_info/',
                'path': 'workspace/build/blobber_upload_dir',
                'type': 'directory'
            },
        ]

    if 'installer-url' in mozharness:
        installer_url = mozharness['installer-url']
    else:
        upstream_task = ('<build-signing>'
                         if mozharness['requires-signed-builds'] else '<build>')
        installer_url = get_artifact_url(upstream_task,
                                         mozharness['build-artifact-name'])

    worker['os-groups'] = test['os-groups']

    # run-as-administrator is a feature for workers with UAC enabled and as such should not be
    # included in tasks on workers that have UAC disabled. Currently UAC is only enabled on
    # gecko Windows 10 workers, however this may be subject to change. Worker type
    # environment definitions can be found in https://github.com/mozilla-releng/OpenCloudConfig
    # See https://docs.microsoft.com/en-us/windows/desktop/secauthz/user-account-control
    # for more information about UAC.
    if test.get('run-as-administrator', False):
        if job['worker-type'].startswith('t-win10-64'):
            worker['run-as-administrator'] = True
        else:
            raise Exception('run-as-administrator not supported on {}'.format(
                job['worker-type']))

    if test['reboot']:
        raise Exception('reboot: {} not supported on generic-worker'.format(
            test['reboot']))

    worker['max-run-time'] = test['max-run-time']
    worker['artifacts'] = artifacts

    env = worker.setdefault('env', {})
    env['MOZ_AUTOMATION'] = '1'
    env['GECKO_HEAD_REPOSITORY'] = config.params['head_repository']
    env['GECKO_HEAD_REV'] = config.params['head_rev']

    # this list will get cleaned up / reduced / removed in bug 1354088
    if is_macosx:
        env.update({
            'IDLEIZER_DISABLE_SHUTDOWN': 'true',
            'LANG': 'en_US.UTF-8',
            'LC_ALL': 'en_US.UTF-8',
            'MOZ_HIDE_RESULTS_TABLE': '1',
            'MOZ_NODE_PATH': '/usr/local/bin/node',
            'MOZ_NO_REMOTE': '1',
            'NO_FAIL_ON_TEST_ERRORS': '1',
            'PATH': '/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin',
            'SHELL': '/bin/bash',
            'XPCOM_DEBUG_BREAK': 'warn',
            'XPC_FLAGS': '0x0',
            'XPC_SERVICE_NAME': '0',
        })
    elif is_bitbar:
        env.update({
            'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
            'MOZHARNESS_SCRIPT': mozharness['script'],
            'MOZHARNESS_URL': {
                'artifact-reference': '<build/public/build/mozharness.zip>'
            },
            'MOZILLA_BUILD_URL': {
                'task-reference': installer_url
            },
            "MOZ_NO_REMOTE": '1',
            "NEED_XVFB": "false",
            "XPCOM_DEBUG_BREAK": 'warn',
            "NO_FAIL_ON_TEST_ERRORS": '1',
            "MOZ_HIDE_RESULTS_TABLE": '1',
            "MOZ_NODE_PATH": "/usr/local/bin/node",
            'TASKCLUSTER_WORKER_TYPE': job['worker-type'],
        })

    extra_config = {
        'installer_url': installer_url,
        'test_packages_url': test_packages_url(taskdesc),
    }
    env['EXTRA_MOZHARNESS_CONFIG'] = {
        'task-reference': json.dumps(extra_config)
    }

    if is_windows:
        mh_command = [
            'c:\\mozilla-build\\python\\python.exe', '-u',
            'mozharness\\scripts\\' + normpath(mozharness['script'])
        ]
    elif is_bitbar:
        mh_command = [bitbar_wrapper, 'bash', "./{}".format(bitbar_script)]
    elif is_macosx and 'macosx1014-64' in test['test-platform']:
        mh_command = [
            '/usr/local/bin/python2', '-u',
            'mozharness/scripts/' + mozharness['script']
        ]
    else:
        # is_linux or is_macosx
        mh_command = [
            'python2.7', '-u', 'mozharness/scripts/' + mozharness['script']
        ]

    for mh_config in mozharness['config']:
        cfg_path = 'mozharness/configs/' + mh_config
        if is_windows:
            cfg_path = normpath(cfg_path)
        mh_command.extend(['--cfg', cfg_path])
    mh_command.extend(mozharness.get('extra-options', []))
    if mozharness.get('download-symbols'):
        if isinstance(mozharness['download-symbols'], basestring):
            mh_command.extend(
                ['--download-symbols', mozharness['download-symbols']])
        else:
            mh_command.extend(['--download-symbols', 'true'])
    if mozharness.get('include-blob-upload-branch'):
        mh_command.append('--blob-upload-branch=' + config.params['project'])

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            mh_command.append('--total-chunk={}'.format(test['chunks']))
            mh_command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace(
                '<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(mh_command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    mh_command[i] += suffix

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    if mozharness['tooltool-downloads']:
        internal = mozharness['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    worker['mounts'] = [{
        'directory': '.',
        'content': {
            'artifact': get_artifact_path(taskdesc, 'mozharness.zip'),
            'task-id': {
                'task-reference': '<build>'
            }
        },
        'format': 'zip'
    }]
    if is_bitbar:
        a_url = '{}/raw-file/{}/taskcluster/scripts/tester/{}'.format(
            config.params['head_repository'], config.params['head_rev'],
            bitbar_script)
        worker['mounts'] = [{
            'file': bitbar_script,
            'content': {
                'url': a_url,
            },
        }]

    if is_windows:
        worker['command'] = [' '.join(mh_command)]
    else:  # is_macosx, is_linux, or is_bitbar
        worker['command'] = [mh_command]
Example #17
def generic_worker_run_task(config, job, taskdesc):
    run = job["run"]
    worker = taskdesc["worker"] = job["worker"]
    is_win = worker["os"] == "windows"
    is_mac = worker["os"] == "macosx"
    is_bitbar = worker["os"] == "linux-bitbar"

    if run["tooltool-downloads"]:
        internal = run["tooltool-downloads"] == "internal"
        add_tooltool(config, job, taskdesc, internal=internal)

    if is_win:
        command = ["C:/mozilla-build/python3/python3.exe", "run-task"]
    elif is_mac:
        command = ["/usr/local/bin/python3", "run-task"]
    else:
        command = ["./run-task"]

    common_setup(config, job, taskdesc, command)

    worker.setdefault("mounts", [])
    if run.get("cache-dotcache"):
        worker["mounts"].append({
            "cache-name":
            "{project}-dotcache".format(**config.params),
            "directory":
            "{workdir}/.cache".format(**run),
        })
    worker["mounts"].append({
        "content": {
            "url": script_url(config, "run-task"),
        },
        "file": "./run-task",
    })
    if worker.get("env", {}).get("MOZ_FETCHES"):
        worker["mounts"].append({
            "content": {
                "url": script_url(config, "misc/fetch-content"),
            },
            "file": "./fetch-content",
        })

    run_command = run["command"]
    run_cwd = run.get("cwd")
    if run_cwd and run["checkout"]:
        run_cwd = path.normpath(
            run_cwd.format(checkout=taskdesc["worker"]["env"]["GECKO_PATH"]))
    elif run_cwd and "{checkout}" in run_cwd:
        raise Exception(
            "Found `{{checkout}}` interpolation in `cwd` for task {name} "
            "but the task doesn't have a checkout: {cwd}".format(
                cwd=run_cwd, name=job.get("name", job.get("label"))))

    # dict is for the case of `{'task-reference': text_type}`.
    if isinstance(run_command, (text_type, dict)):
        if is_win:
            if isinstance(run_command, dict):
                for k in run_command.keys():
                    run_command[k] = '"{}"'.format(run_command[k])
            else:
                run_command = '"{}"'.format(run_command)
        run_command = ["bash", "-cx", run_command]

    if run["comm-checkout"]:
        command.append("--comm-checkout={}/comm".format(
            taskdesc["worker"]["env"]["GECKO_PATH"]))

    if run["run-as-root"]:
        command.extend(("--user", "root", "--group", "root"))
    if run_cwd:
        command.extend(("--task-cwd", run_cwd))
    command.append("--")
    if is_bitbar:
        # Use the bitbar wrapper script which sets up the device and adb
        # environment variables
        command.append("/builds/taskcluster/script.py")
    command.extend(run_command)

    if is_win:
        taskref = False
        for c in command:
            if isinstance(c, dict):
                taskref = True

        if taskref:
            cmd = []
            for c in command:
                if isinstance(c, dict):
                    for v in c.values():
                        cmd.append(v)
                else:
                    cmd.append(c)
            worker["command"] = [{"artifact-reference": " ".join(cmd)}]
        else:
            worker["command"] = [" ".join(command)]
    else:
        worker["command"] = [
            ["chmod", "+x", "run-task"],
            command,
        ]
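
A minimal sketch of the Windows branch above (hypothetical command list): if any element is an `{'artifact-reference': ...}` dict, the joined command is itself wrapped in an artifact-reference so the reference still gets resolved after flattening:

command = ['C:/mozilla-build/python3/python3.exe', 'run-task', '--',
           {'artifact-reference': '<build/public/build/target.zip>'}]  # hypothetical
if any(isinstance(c, dict) for c in command):
    cmd = [v for c in command for v in (c.values() if isinstance(c, dict) else [c])]
    worker_command = [{'artifact-reference': ' '.join(cmd)}]
else:
    worker_command = [' '.join(command)]
print(worker_command)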
Example #18
0
def docker_worker_toolchain(config, job, taskdesc):
    run = job['run']

    worker = taskdesc['worker']
    worker['chain-of-trust'] = True

    # If the task doesn't have a docker-image, set a default
    worker.setdefault('docker-image', {'in-tree': 'toolchain-build'})

    # Allow the job to specify where artifacts come from, but add
    # public/build if it's not there already.
    artifacts = worker.setdefault('artifacts', [])
    if not any(
            artifact.get('name') == 'public/build' for artifact in artifacts):
        docker_worker_add_artifacts(config, job, taskdesc)

    support_vcs_checkout(config,
                         job,
                         taskdesc,
                         sparse=('sparse-profile' in run))

    # Toolchain checkouts don't live under {workdir}/checkouts
    workspace = '{workdir}/workspace/build'.format(**run)
    gecko_path = '{}/src'.format(workspace)

    env = worker['env']
    env.update({
        'MOZ_BUILD_DATE': config.params['moz_build_date'],
        'MOZ_SCM_LEVEL': config.params['level'],
        'TOOLS_DISABLE': 'true',
        'MOZ_AUTOMATION': '1',
        'MOZ_FETCHES_DIR': workspace,
        'GECKO_PATH': gecko_path,
    })

    if run['tooltool-downloads']:
        internal = run['tooltool-downloads'] == 'internal'
        add_tooltool(config, job, taskdesc, internal=internal)

    # Use `mach` to invoke python scripts so in-tree libraries are available.
    if run['script'].endswith('.py'):
        wrapper = '{}/mach python '.format(gecko_path)
    else:
        wrapper = ''

    args = run.get('arguments', '')
    if args:
        args = ' ' + shell_quote(*args)

    sparse_profile = []
    if run.get('sparse-profile'):
        sparse_profile = [
            '--gecko-sparse-profile=build/sparse-profiles/{}'.format(
                run['sparse-profile'])
        ]

    worker['command'] = [
        '{workdir}/bin/run-task'.format(**run),
        '--gecko-checkout={}'.format(gecko_path),
    ] + sparse_profile + [
        '--', 'bash', '-c', 'cd {} && '
        '{}workspace/build/src/taskcluster/scripts/misc/{}{}'.format(
            run['workdir'], wrapper, run['script'], args)
    ]

    attributes = taskdesc.setdefault('attributes', {})
    attributes['toolchain-artifact'] = run['toolchain-artifact']
    if 'toolchain-alias' in run:
        attributes['toolchain-alias'] = run['toolchain-alias']

    if not taskgraph.fast:
        name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
        taskdesc['cache'] = {
            'type': CACHE_TYPE,
            'name': name,
            'digest-data': get_digest_data(config, run, taskdesc),
        }