def make_release_generate_checksums_signing_description(config, jobs):
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)

        treeherder = job.get("treeherder", {})
        treeherder.setdefault("symbol", "SGenChcks")
        dep_th_platform = (dep_job.task.get("extra", {}).get(
            "treeherder", {}).get("machine", {}).get("platform", ""))
        treeherder.setdefault("platform", "{}/opt".format(dep_th_platform))
        treeherder.setdefault("tier", 1)
        treeherder.setdefault("kind", "build")

        job_template = "{}-{}".format(dep_job.label, "signing")
        label = job.get("label", job_template)
        description = "Signing of the overall release-related checksums"

        dependencies = {dep_job.kind: dep_job.label}

        upstream_artifacts = [{
            "taskId": {
                "task-reference": "<{}>".format(str(dep_job.kind))
            },
            "taskType":
            "build",
            "paths": [
                get_artifact_path(dep_job, "SHA256SUMS"),
                get_artifact_path(dep_job, "SHA512SUMS"),
            ],
            "formats": ["autograph_gpg"],
        }]

        signing_cert_scope = get_signing_cert_scope(config)

        task = {
            "label": label,
            "description": description,
            "worker-type": "linux-signing",
            "worker": {
                "implementation": "scriptworker-signing",
                "upstream-artifacts": upstream_artifacts,
                "max-run-time": 3600,
            },
            "scopes": [
                signing_cert_scope,
            ],
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "treeherder": treeherder,
        }

        yield task
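
The "<build>"-style strings inside upstream_artifacts are task references: later machinery substitutes each <name> token with the taskId of the dependency registered under that name. Below is a minimal, self-contained sketch of that substitution; the resolver function and the taskId value are illustrative stand-ins, not the real taskgraph code.

import re

# Hypothetical mapping from dependency name to its resolved taskId.
dependency_task_ids = {"build": "abc123TaskId"}

def resolve_task_references(text, task_ids):
    # Replace every <name> token with the matching dependency's taskId.
    return re.sub(r"<([^>]+)>", lambda m: task_ids[m.group(1)], text)

print(resolve_task_references("<build>", dependency_task_ids))  # -> abc123TaskId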
Example 2
def make_release_generate_checksums_signing_description(config, jobs):
    for job in jobs:
        dep_job = job['dependent-task']
        attributes = copy_attributes_from_dependent_job(dep_job)

        treeherder = job.get('treeherder', {})
        treeherder.setdefault('symbol', 'SGenChcks')
        dep_th_platform = dep_job.task.get('extra', {}).get(
            'treeherder', {}).get('machine', {}).get('platform', '')
        treeherder.setdefault('platform', "{}/opt".format(dep_th_platform))
        treeherder.setdefault('tier', 1)
        treeherder.setdefault('kind', 'build')

        job_template = "{}-{}".format(dep_job.label, "signing")
        label = job.get("label", job_template)
        description = "Signing of the overall release-related checksums"

        dependencies = {"build": dep_job.label}

        upstream_artifacts = [{
            "taskId": {
                "task-reference": "<build>"
            },
            "taskType":
            "build",
            "paths": [
                get_artifact_path(dep_job, "SHA256SUMS"),
                get_artifact_path(dep_job, "SHA512SUMS"),
            ],
            "formats": ["gpg"]
        }]

        signing_cert_scope = get_signing_cert_scope(config)

        task = {
            'label': label,
            'description': description,
            'worker-type': get_worker_type_for_scope(config,
                                                     signing_cert_scope),
            'worker': {
                'implementation': 'scriptworker-signing',
                'upstream-artifacts': upstream_artifacts,
                'max-run-time': 3600
            },
            'scopes': [signing_cert_scope,
                       "project:releng:signing:format:gpg"],
            'dependencies': dependencies,
            'attributes': attributes,
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'treeherder': treeherder,
        }

        yield task
Example 3
def define_upstream_artifacts(config, jobs):
    for job in jobs:
        dep_job = job['primary-dependency']
        build_platform = dep_job.attributes.get('build_platform')

        job['attributes'] = copy_attributes_from_dependent_job(dep_job)

        artifacts_specifications = generate_specifications_of_artifacts_to_sign(
            config,
            job,
            keep_locale_template=False,
            kind=config.kind,
        )

        if 'android' in build_platform:
            # We're in the job that creates both multilocale and en-US APKs
            artifacts_specifications[0]['artifacts'].append(
                get_artifact_path(dep_job, 'en-US/target.apk'))

        job['upstream-artifacts'] = [{
            'taskId': {
                'task-reference': '<build>'
            },
            'taskType': 'build',
            'paths': spec['artifacts'],
            'formats': spec['formats'],
        } for spec in artifacts_specifications]

        yield job
Example 4
def set_target(config, tests):
    for test in tests:
        build_platform = test['build-platform']
        target = None
        if 'target' in test:
            resolve_keyed_by(test, 'target', item_name=test['test-name'])
            target = test['target']
        if not target:
            if build_platform.startswith('macosx'):
                target = 'target.dmg'
            elif build_platform.startswith('android'):
                target = 'target.apk'
            elif build_platform.startswith('win'):
                target = 'target.zip'
            else:
                target = 'target.tar.bz2'

        if isinstance(target, dict):
            # TODO Remove hardcoded mobile artifact prefix
            index_url = get_index_url(target['index'])
            installer_url = '{}/artifacts/public/{}'.format(
                index_url, target['name'])
            test['mozharness']['installer-url'] = installer_url
        else:
            test['mozharness']['build-artifact-name'] = get_artifact_path(
                test, target)

        yield test
Example 5
def test_packages_url(taskdesc):
    """Account for different platforms that name their test packages differently"""
    artifact_url = get_artifact_url('<build>', get_artifact_path(taskdesc,
                                    'target.test_packages.json'))
    # for android nightly we need to add 'en-US' to the artifact url
    test = taskdesc['run']['test']
    if get_variant(test['test-platform']) == "nightly" and 'android' in test['test-platform']:
        head, tail = os.path.split(artifact_url)
        artifact_url = os.path.join(head, 'en-US', tail)
    return artifact_url
Example 6
def test_packages_url(taskdesc):
    """Account for different platforms that name their test packages differently"""
    artifact_url = get_artifact_url(
        "<build>", get_artifact_path(taskdesc, "target.test_packages.json"))
    # for android shippable we need to add 'en-US' to the artifact url
    test = taskdesc["run"]["test"]
    if "android" in test["test-platform"] and (get_variant(
            test["test-platform"]) in ("shippable", "shippable-qr")):
        head, tail = os.path.split(artifact_url)
        artifact_url = os.path.join(head, "en-US", tail)
    return artifact_url
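
A minimal, self-contained sketch of the en-US insertion performed by both test_packages_url variants, using a made-up artifact URL; os.path.split/os.path.join work on the URL here because the POSIX workers this runs on use "/" as the path separator.

import os.path

artifact_url = ("https://example.invalid/api/queue/v1/task/<build>/artifacts/"
                "public/build/target.test_packages.json")
head, tail = os.path.split(artifact_url)
artifact_url = os.path.join(head, "en-US", tail)
print(artifact_url)  # ...public/build/en-US/target.test_packages.json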
Example 7
def set_target(config, tests):
    for test in tests:
        build_platform = test['build-platform']
        target = None
        if 'target' in test:
            resolve_keyed_by(test, 'target', item_name=test['test-name'])
            target = test['target']
        if not target:
            if build_platform.startswith('macosx'):
                target = 'target.dmg'
            elif build_platform.startswith('android'):
                target = 'target.apk'
            elif build_platform.startswith('win'):
                target = 'target.zip'
            else:
                target = 'target.tar.bz2'
        test['mozharness']['build-artifact-name'] = get_artifact_path(test, target)

        yield test
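
The platform-to-default-target mapping shared by both set_target variants reads as a small pure function; the helper below is an illustrative sketch (default_target is not a name from the original transforms).

def default_target(build_platform):
    # Mirrors the startswith() checks in set_target above.
    if build_platform.startswith('macosx'):
        return 'target.dmg'
    if build_platform.startswith('android'):
        return 'target.apk'
    if build_platform.startswith('win'):
        return 'target.zip'
    return 'target.tar.bz2'

assert default_target('macosx64/opt') == 'target.dmg'
assert default_target('linux64/debug') == 'target.tar.bz2'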
Example 8
def mozharness_test_on_native_engine(config, job, taskdesc):
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker']
    is_talos = test['suite'] == 'talos' or test['suite'] == 'raptor'
    is_macosx = worker['os'] == 'macosx'

    installer_url = get_artifact_url('<build>',
                                     mozharness['build-artifact-name'])
    mozharness_url = get_artifact_url(
        '<build>', get_artifact_path(taskdesc, 'mozharness.zip'))

    worker['artifacts'] = [
        {
            'name': prefix.rstrip('/'),
            'path': path.rstrip('/'),
            'type': 'directory',
        } for (prefix, path) in [
            # (artifact name prefix, in-image path relative to homedir)
            ("public/logs/", "workspace/build/upload/logs/"),
            ("public/test", "artifacts/"),
            ("public/test_info/", "workspace/build/blobber_upload_dir/"),
        ]
    ]

    if test['reboot']:
        worker['reboot'] = test['reboot']

    if test['max-run-time']:
        worker['max-run-time'] = test['max-run-time']

    env = worker.setdefault('env', {})
    env.update({
        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
        'GECKO_HEAD_REV': config.params['head_rev'],
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZHARNESS_URL': {
            'task-reference': mozharness_url
        },
        'MOZILLA_BUILD_URL': {
            'task-reference': installer_url
        },
        "MOZ_NO_REMOTE": '1',
        "XPCOM_DEBUG_BREAK": 'warn',
        "NO_FAIL_ON_TEST_ERRORS": '1',
        "MOZ_HIDE_RESULTS_TABLE": '1',
        "MOZ_NODE_PATH": "/usr/local/bin/node",
        'MOZ_AUTOMATION': '1',
    })
    # talos tests don't need Xvfb
    if is_talos:
        env['NEED_XVFB'] = 'false'

    script = 'test-macosx.sh' if is_macosx else 'test-linux.sh'
    worker['context'] = '{}/raw-file/{}/taskcluster/scripts/tester/{}'.format(
        config.params['head_repository'], config.params['head_rev'], script)

    command = worker['command'] = ["./{}".format(script)]
    command.extend([
        {
            "task-reference": "--installer-url=" + installer_url
        },
        {
            "task-reference":
            "--test-packages-url=" + test_packages_url(taskdesc)
        },
    ])
    if mozharness.get('include-blob-upload-branch'):
        command.append('--blob-upload-branch=' + config.params['project'])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace(
                '<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        download_symbols = {
            True: 'true',
            False: 'false'
        }.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)
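
A small sketch of the 'test-suite-suffix' chunking branch above, with hand-picked stand-in values for the mozharness configuration (it also uses str rather than the Python 2 basestring of the original).

command = ['python2.7', '-u', 'mozharness/scripts/desktop_unittest.py',
           '--test-suite=mochitest']
chunk_suffix = '-<CHUNK>'
this_chunk = 3
suffix = chunk_suffix.replace('<CHUNK>', str(this_chunk))
for i, c in enumerate(command):
    if isinstance(c, str) and c.startswith('--test-suite'):
        command[i] += suffix
print(command[-1])  # --test-suite=mochitest-3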
Example 9
def mozharness_test_on_generic_worker(config, job, taskdesc):
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker']

    is_macosx = worker['os'] == 'macosx'
    is_windows = worker['os'] == 'windows'
    is_linux = worker['os'] == 'linux'
    assert is_macosx or is_windows or is_linux

    artifacts = [
        {
            'name': 'public/logs',
            'path': 'logs',
            'type': 'directory'
        },
    ]

    # jittest doesn't have blob_upload_dir
    if test['test-name'] != 'jittest':
        artifacts.append({
            'name': 'public/test_info',
            'path': 'build/blobber_upload_dir',
            'type': 'directory'
        })

    upstream_task = '<build-signing>' if mozharness[
        'requires-signed-builds'] else '<build>'
    installer_url = get_artifact_url(upstream_task,
                                     mozharness['build-artifact-name'])

    taskdesc['scopes'].extend([
        'generic-worker:os-group:{}/{}'.format(job['worker-type'], group)
        for group in test['os-groups']
    ])

    worker['os-groups'] = test['os-groups']

    # run-as-administrator is a feature for workers with UAC enabled and as such should not be
    # included in tasks on workers that have UAC disabled. Currently UAC is only enabled on
    # gecko Windows 10 workers, however this may be subject to change. Worker type
    # environment definitions can be found in https://github.com/mozilla-releng/OpenCloudConfig
    # See https://docs.microsoft.com/en-us/windows/desktop/secauthz/user-account-control
    # for more information about UAC.
    if test.get('run-as-administrator', False):
        if job['worker-type'].startswith(
                'aws-provisioner-v1/gecko-t-win10-64'):
            taskdesc['scopes'].extend([
                'generic-worker:run-as-administrator:{}'.format(
                    job['worker-type'])
            ])
            worker['run-as-administrator'] = True
        else:
            raise Exception('run-as-administrator not supported on {}'.format(
                job['worker-type']))

    if test['reboot']:
        raise Exception('reboot: {} not supported on generic-worker'.format(
            test['reboot']))

    worker['max-run-time'] = test['max-run-time']
    worker['artifacts'] = artifacts

    env = worker.setdefault('env', {})
    env['MOZ_AUTOMATION'] = '1'
    env['GECKO_HEAD_REPOSITORY'] = config.params['head_repository']
    env['GECKO_HEAD_REV'] = config.params['head_rev']

    # this list will get cleaned up / reduced / removed in bug 1354088
    if is_macosx:
        env.update({
            'IDLEIZER_DISABLE_SHUTDOWN': 'true',
            'LANG': 'en_US.UTF-8',
            'LC_ALL': 'en_US.UTF-8',
            'MOZ_HIDE_RESULTS_TABLE': '1',
            'MOZ_NODE_PATH': '/usr/local/bin/node',
            'MOZ_NO_REMOTE': '1',
            'NO_FAIL_ON_TEST_ERRORS': '1',
            'PATH': '/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin',
            'SHELL': '/bin/bash',
            'XPCOM_DEBUG_BREAK': 'warn',
            'XPC_FLAGS': '0x0',
            'XPC_SERVICE_NAME': '0',
        })

    if is_windows:
        mh_command = [
            'c:\\mozilla-build\\python\\python.exe', '-u',
            'mozharness\\scripts\\' + normpath(mozharness['script'])
        ]
    else:
        # is_linux or is_macosx
        mh_command = [
            'python2.7', '-u', 'mozharness/scripts/' + mozharness['script']
        ]

    for mh_config in mozharness['config']:
        cfg_path = 'mozharness/configs/' + mh_config
        if is_windows:
            cfg_path = normpath(cfg_path)
        mh_command.extend(['--cfg', cfg_path])
    mh_command.extend(mozharness.get('extra-options', []))
    mh_command.extend(['--installer-url', installer_url])
    mh_command.extend(['--test-packages-url', test_packages_url(taskdesc)])
    if mozharness.get('download-symbols'):
        if isinstance(mozharness['download-symbols'], basestring):
            mh_command.extend(
                ['--download-symbols', mozharness['download-symbols']])
        else:
            mh_command.extend(['--download-symbols', 'true'])
    if mozharness.get('include-blob-upload-branch'):
        mh_command.append('--blob-upload-branch=' + config.params['project'])
    mh_command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            mh_command.append('--total-chunk={}'.format(test['chunks']))
            mh_command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace(
                '<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(mh_command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    mh_command[i] += suffix

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    worker['mounts'] = [{
        'directory': '.',
        'content': {
            'artifact': get_artifact_path(taskdesc, 'mozharness.zip'),
            'task-id': {
                'task-reference': '<build>'
            }
        },
        'format': 'zip'
    }]

    if is_windows:
        worker['command'] = [{'task-reference': ' '.join(mh_command)}]
    else:  # is_macosx or is_linux
        mh_command_task_ref = []
        for token in mh_command:
            mh_command_task_ref.append({'task-reference': token})
        worker['command'] = [mh_command_task_ref]
Example 10
def fill_template(config, tasks):
    dummy_tasks = {}

    for task in tasks:
        name = task['name']

        deps = {}
        urls = {}
        previous_artifact = None
        for k in ('original', 'new'):
            value = task[k]
            if isinstance(value, basestring):
                deps[k] = value
                task_id = '<{}>'.format(k)
                os_hint = value
            else:
                index = value['index-search']
                if index not in dummy_tasks:
                    dummy_tasks[index] = {
                        'label': 'index-search-' + index,
                        'description': index,
                        'worker-type': 'invalid/always-optimized',
                        'run': {
                            'using': 'always-optimized',
                        },
                        'optimization': {
                            'index-search': [index],
                        }
                    }
                    yield dummy_tasks[index]
                deps[index] = 'index-search-' + index
                task_id = '<{}>'.format(index)
                os_hint = index.split('.')[-1]
            if 'linux' in os_hint:
                artifact = 'target.tar.bz2'
            elif 'macosx' in os_hint:
                artifact = 'target.dmg'
            elif 'android' in os_hint:
                artifact = 'target.apk'
            elif 'win' in os_hint:
                artifact = 'target.zip'
            else:
                raise Exception(
                    'Cannot figure out the OS for {!r}'.format(value))
            if previous_artifact is not None and previous_artifact != artifact:
                raise Exception('Cannot compare builds from different OSes')
            url = get_artifact_url(task_id, get_artifact_path(task, artifact))
            urls[k] = {'task-reference': url}
            previous_artifact = artifact

        taskdesc = {
            'label': 'diff-' + name,
            'description': name,
            'treeherder': {
                'symbol': task['symbol'],
                'platform': 'diff/opt',
                'kind': 'other',
                'tier': 2,
            },
            'worker-type': 'aws-provisioner-v1/gecko-{}-b-linux'.format(
                config.params['level']),
            'worker': {
                'docker-image': {
                    'in-tree': 'diffoscope'
                },
                'artifacts': [{
                    'type': 'file',
                    'path': '/builds/worker/diff.html',
                    'name': 'public/diff.html',
                }, {
                    'type': 'file',
                    'path': '/builds/worker/diff.txt',
                    'name': 'public/diff.txt',
                }],
                'env': {
                    'ORIG_URL': urls['original'],
                    'NEW_URL': urls['new'],
                    'DIFFOSCOPE_ARGS': ' '.join(
                        task[k] for k in ('args', 'extra-args') if k in task),
                },
                'max-run-time': 1800,
            },
            'run': {
                'using': 'run-task',
                'checkout': False,
                'command': '/builds/worker/bin/get_and_diffoscope '
                           '"$ORIG_URL" "$NEW_URL"',
            },
            'dependencies': deps,
        }

        if artifact.endswith('.dmg'):
            taskdesc['toolchains'] = [
                'linux64-cctools-port',
                'linux64-libdmg',
            ]

        yield taskdesc
Example 11
def make_repackage_signing_description(config, jobs):
    for job in jobs:
        dep_job = job['dependent-task']
        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes['repackage_type'] = 'repackage-signing'

        treeherder = job.get('treeherder', {})
        if attributes.get('nightly'):
            treeherder.setdefault('symbol', 'rs(N)')
        else:
            treeherder.setdefault('symbol', 'rs(B)')
        dep_th_platform = dep_job.task.get('extra', {}).get(
            'treeherder', {}).get('machine', {}).get('platform', '')
        treeherder.setdefault('platform',
                              "{}/opt".format(dep_th_platform))
        treeherder.setdefault(
            'tier',
            dep_job.task.get('extra', {}).get('treeherder', {}).get('tier', 1)
            )
        treeherder.setdefault('kind', 'build')

        label = job['label']

        dependencies = {"repackage": dep_job.label}

        signing_dependencies = dep_job.dependencies
        # This is so we get the build task etc in our dependencies to
        # have better beetmover support.
        dependencies.update({k: v for k, v in signing_dependencies.items()
                             if k != 'docker-image'})

        locale_str = ""
        if dep_job.attributes.get('locale'):
            treeherder['symbol'] = 'rs({})'.format(dep_job.attributes.get('locale'))
            attributes['locale'] = dep_job.attributes.get('locale')
            locale_str = "{}/".format(dep_job.attributes.get('locale'))

        description = (
            "Signing of repackaged artifacts for locale '{locale}' for build '"
            "{build_platform}/{build_type}'".format(
                locale=attributes.get('locale', 'en-US'),
                build_platform=attributes.get('build_platform'),
                build_type=attributes.get('build_type')
            )
        )

        build_platform = dep_job.attributes.get('build_platform')
        is_nightly = dep_job.attributes.get('nightly')
        signing_cert_scope = get_signing_cert_scope_per_platform(
            build_platform, is_nightly, config
        )
        scopes = [signing_cert_scope, add_scope_prefix(config, 'signing:format:mar_sha384')]

        upstream_artifacts = [{
            "taskId": {"task-reference": "<repackage>"},
            "taskType": "repackage",
            "paths": [
                get_artifact_path(dep_job, "{}target.complete.mar".format(locale_str)),
            ],
            "formats": ["mar_sha384"]
        }]
        if 'win' in build_platform:
            upstream_artifacts.append({
                "taskId": {"task-reference": "<repackage>"},
                "taskType": "repackage",
                "paths": [
                    get_artifact_path(dep_job, "{}target.installer.exe".format(locale_str)),
                ],
                "formats": ["sha2signcode"]
            })
            scopes.append(add_scope_prefix(config, "signing:format:sha2signcode"))

            use_stub = attributes.get('stub-installer')
            if use_stub:
                upstream_artifacts.append({
                    "taskId": {"task-reference": "<repackage>"},
                    "taskType": "repackage",
                    "paths": [
                        get_artifact_path(
                            dep_job, "{}target.stub-installer.exe".format(locale_str)
                        ),
                    ],
                    "formats": ["sha2signcodestub"]
                })
                scopes.append(add_scope_prefix(config, "signing:format:sha2signcodestub"))

        task = {
            'label': label,
            'description': description,
            'worker-type': get_worker_type_for_scope(config, signing_cert_scope),
            'worker': {'implementation': 'scriptworker-signing',
                       'upstream-artifacts': upstream_artifacts,
                       'max-run-time': 3600},
            'scopes': scopes,
            'dependencies': dependencies,
            'attributes': attributes,
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'treeherder': treeherder,
        }

        yield task
Example 12
def fill_template(config, tasks):
    dummy_tasks = {}

    for task in tasks:
        name = task["name"]

        deps = {}
        urls = {}
        previous_artifact = None
        artifact = task.get("artifact")
        for k in ("original", "new"):
            value = task[k]
            if isinstance(value, text_type):
                deps[k] = value
                dep_name = k
                os_hint = value
            else:
                index = value["index-search"]
                if index not in dummy_tasks:
                    dummy_tasks[index] = {
                        "label": "index-search-" + index,
                        "description": index,
                        "worker-type": "invalid/always-optimized",
                        "run": {
                            "using": "always-optimized",
                        },
                        "optimization": {
                            "index-search": [index],
                        },
                    }
                    yield dummy_tasks[index]
                deps[index] = "index-search-" + index
                dep_name = index
                os_hint = index.split(".")[-1]
            if artifact:
                pass
            elif "linux" in os_hint:
                artifact = "target.tar.bz2"
            elif "macosx" in os_hint:
                artifact = "target.dmg"
            elif "android" in os_hint:
                artifact = "target.apk"
            elif "win" in os_hint:
                artifact = "target.zip"
            else:
                raise Exception("Cannot figure out the OS for {!r}".format(value))
            if previous_artifact is not None and previous_artifact != artifact:
                raise Exception("Cannot compare builds from different OSes")
            urls[k] = {
                "artifact-reference": "<{}/{}>".format(
                    dep_name, get_artifact_path(task, artifact)
                ),
            }
            previous_artifact = artifact

        taskdesc = {
            "label": "diff-" + name,
            "description": name,
            "treeherder": {
                "symbol": task["symbol"],
                "platform": "diff/opt",
                "kind": "other",
                "tier": task["tier"],
            },
            "worker-type": "b-linux",
            "worker": {
                "docker-image": {"in-tree": "diffoscope"},
                "artifacts": [
                    {
                        "type": "file",
                        "path": "/builds/worker/{}".format(f),
                        "name": "public/{}".format(f),
                    }
                    for f in (
                        "diff.html",
                        "diff.txt",
                    )
                ],
                "env": {
                    "ORIG_URL": urls["original"],
                    "NEW_URL": urls["new"],
                    "DIFFOSCOPE_ARGS": " ".join(
                        task[k] for k in ("args", "extra-args") if k in task
                    ),
                    "PRE_DIFF": "; ".join(task.get("pre-diff-commands", [])),
                },
                "max-run-time": 1800,
            },
            "run": {
                "using": "run-task",
                "checkout": task.get("unpack", False),
                "command": "/builds/worker/bin/get_and_diffoscope{}{}".format(
                    " --unpack" if task.get("unpack") else "",
                    " --fail" if task.get("fail-on-diff") else "",
                ),
            },
            "dependencies": deps,
            "optimization": task.get("optimization"),
        }
        if "run-on-projects" in task:
            taskdesc["run-on-projects"] = task["run-on-projects"]

        if artifact.endswith(".dmg"):
            taskdesc.setdefault("fetches", {}).setdefault("toolchain", []).extend(
                [
                    "linux64-cctools-port",
                    "linux64-libdmg",
                ]
            )

        yield taskdesc
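
The 'artifact-reference' strings built above ("<dep/path>") are later expanded into full artifact URLs for the named dependency. The snippet below is a simplified stand-in for that expansion, with hypothetical taskIds and URL template rather than the real Taskcluster queue logic.

import re

task_ids = {"original": "aaaTaskId", "new": "bbbTaskId"}  # hypothetical

def expand_artifact_references(text, task_ids):
    # Replace <dep/artifact/path> with an artifact URL for that dependency.
    def repl(match):
        dep, path = match.group(1).split("/", 1)
        return "https://example.invalid/api/queue/v1/task/{}/artifacts/{}".format(
            task_ids[dep], path)
    return re.sub(r"<([^>]+)>", repl, text)

print(expand_artifact_references(
    "<original/public/build/target.tar.bz2>", task_ids))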
Example 13
def generate_specifications_of_artifacts_to_sign(task,
                                                 keep_locale_template=True,
                                                 kind=None):
    build_platform = task.attributes.get('build_platform')
    use_stub = task.attributes.get('stub-installer')
    if kind == 'release-source-signing':
        artifacts_specifications = [{
            'artifacts': [get_artifact_path(task, 'source.tar.xz')],
            'formats': ['gpg'],
        }]
    elif 'android' in build_platform:
        artifacts_specifications = [{
            'artifacts': [
                get_artifact_path(task, '{locale}/target.apk'),
            ],
            'formats': ['jar'],
        }]
    # XXX: MARs aren't signed here (on any platform) because their internals
    # will be signed after this stage of the release
    elif 'macosx' in build_platform:
        if is_partner_kind(kind):
            extension = 'tar.gz'
        else:
            extension = 'dmg'
        artifacts_specifications = [{
            'artifacts': [
                get_artifact_path(task,
                                  '{{locale}}/target.{}'.format(extension))
            ],
            'formats': ['macapp', 'widevine'],
        }]
    elif 'win' in build_platform:
        artifacts_specifications = [{
            'artifacts': [
                get_artifact_path(task, '{locale}/setup.exe'),
            ],
            'formats': ['sha2signcode'],
        }, {
            'artifacts': [
                get_artifact_path(task, '{locale}/target.zip'),
            ],
            'formats': ['sha2signcode', 'widevine'],
        }]

        if use_stub:
            artifacts_specifications[0]['artifacts'] += [
                get_artifact_path(task, '{locale}/setup-stub.exe')
            ]
    elif 'linux' in build_platform:
        artifacts_specifications = [{
            'artifacts': [get_artifact_path(task, '{locale}/target.tar.bz2')],
            'formats': ['gpg', 'widevine'],
        }]
    else:
        raise Exception("Platform not implemented for signing")

    if not keep_locale_template:
        artifacts_specifications = _strip_locale_template(
            artifacts_specifications)

    if is_partner_kind(kind):
        artifacts_specifications = _strip_widevine_for_partners(
            artifacts_specifications)

    return artifacts_specifications
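
Callers such as define_upstream_artifacts (Example 3) flatten these specifications into scriptworker upstream-artifacts entries. A rough sketch of that shape, using a hand-written specification list instead of a real task object:

# Hypothetical result for a Windows build with the {locale} template stripped.
artifacts_specifications = [
    {'artifacts': ['public/build/setup.exe'], 'formats': ['sha2signcode']},
    {'artifacts': ['public/build/target.zip'],
     'formats': ['sha2signcode', 'widevine']},
]

upstream_artifacts = [{
    'taskId': {'task-reference': '<build>'},
    'taskType': 'build',
    'paths': spec['artifacts'],
    'formats': spec['formats'],
} for spec in artifacts_specifications]

print(len(upstream_artifacts))  # 2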
Example 14
def fill_template(config, tasks):
    dummy_tasks = {}

    for task in tasks:
        name = task['name']

        deps = {}
        urls = {}
        previous_artifact = None
        for k in ('original', 'new'):
            value = task[k]
            if isinstance(value, text_type):
                deps[k] = value
                dep_name = k
                os_hint = value
            else:
                index = value['index-search']
                if index not in dummy_tasks:
                    dummy_tasks[index] = {
                        'label': 'index-search-' + index,
                        'description': index,
                        'worker-type': 'invalid/always-optimized',
                        'run': {
                            'using': 'always-optimized',
                        },
                        'optimization': {
                            'index-search': [index],
                        }
                    }
                    yield dummy_tasks[index]
                deps[index] = 'index-search-' + index
                dep_name = index
                os_hint = index.split('.')[-1]
            if 'linux' in os_hint:
                artifact = 'target.tar.bz2'
            elif 'macosx' in os_hint:
                artifact = 'target.dmg'
            elif 'android' in os_hint:
                artifact = 'target.apk'
            elif 'win' in os_hint:
                artifact = 'target.zip'
            else:
                raise Exception(
                    'Cannot figure out the OS for {!r}'.format(value))
            if previous_artifact is not None and previous_artifact != artifact:
                raise Exception(
                    'Cannot compare builds from different OSes')
            urls[k] = {
                'artifact-reference': '<{}/{}>'.format(
                    dep_name, get_artifact_path(task, artifact)),
            }
            previous_artifact = artifact

        taskdesc = {
            'label': 'diff-' + name,
            'description': name,
            'treeherder': {
                'symbol': task['symbol'],
                'platform': 'diff/opt',
                'kind': 'other',
                'tier': 2,
            },
            'worker-type': 'b-linux',
            'worker': {
                'docker-image': {'in-tree': 'diffoscope'},
                'artifacts': [{
                    'type': 'file',
                    'path': '/builds/worker/{}'.format(f),
                    'name': 'public/{}'.format(f),
                } for f in (
                    'diff.html',
                    'diff.txt',
                    'generated-files.diff.html',
                    'generated-files.diff.txt',
                )],
                'env': {
                    'ORIG_URL': urls['original'],
                    'NEW_URL': urls['new'],
                    'DIFFOSCOPE_ARGS': ' '.join(
                        task[k] for k in ('args', 'extra-args') if k in task),
                    'PRE_DIFF': '; '.join(task.get('pre-diff-commands', [])),
                },
                'max-run-time': 1800,
            },
            'run': {
                'using': 'run-task',
                'checkout': task.get('unpack', False),
                'command': '/builds/worker/bin/get_and_diffoscope{}{}'.format(
                    ' --unpack' if task.get('unpack') else '',
                    ' --fail' if task.get('fail-on-diff') else '',
                ),
            },
            'dependencies': deps,
        }
        if 'run-on-projects' in task:
            taskdesc['run-on-projects'] = task['run-on-projects']

        if artifact.endswith('.dmg'):
            taskdesc.setdefault('fetches', {}).setdefault('toolchain', []).extend([
                'linux64-cctools-port',
                'linux64-libdmg',
            ])

        yield taskdesc
Example 15
def make_repackage_signing_description(config, jobs):
    for job in jobs:
        dep_job = job['primary-dependency']
        repack_id = dep_job.task['extra']['repack_id']
        attributes = dep_job.attributes
        build_platform = dep_job.attributes.get('build_platform')
        is_nightly = dep_job.attributes.get(
            'nightly', dep_job.attributes.get('shippable'))

        # Mac & windows
        label = dep_job.label.replace("repackage-", "repackage-signing-")
        # Linux
        label = label.replace("chunking-dummy-", "repackage-signing-")
        description = (
            "Signing of repackaged artifacts for partner repack id '{repack_id}' for build '"
            "{build_platform}/{build_type}'".format(
                repack_id=repack_id,
                build_platform=attributes.get('build_platform'),
                build_type=attributes.get('build_type')))

        if 'linux' in build_platform:
            # we want the repack job, found via the dependencies of the
            # chunking-dummy dep_job
            for dep in dep_job.dependencies.values():
                if dep.startswith('release-partner-repack'):
                    dependencies = {"repack": dep}
                    break
        else:
            # we have a genuine repackage job as our parent
            dependencies = {"repackage": dep_job.label}

        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes['repackage_type'] = 'repackage-signing'

        signing_cert_scope = get_signing_cert_scope_per_platform(
            build_platform, is_nightly, config)
        scopes = [signing_cert_scope]

        if 'win' in build_platform:
            upstream_artifacts = [{
                "taskId": {
                    "task-reference": "<repackage>"
                },
                "taskType":
                "repackage",
                "paths": [
                    get_artifact_path(
                        dep_job, "{}/target.installer.exe".format(repack_id)),
                ],
                "formats": ["autograph_authenticode", "autograph_gpg"]
            }]

            partner_config = get_partner_config_by_kind(config, config.kind)
            partner, subpartner, _ = repack_id.split('/')
            repack_stub_installer = partner_config[partner][subpartner].get(
                'repack_stub_installer')
            if build_platform.startswith('win32') and repack_stub_installer:
                upstream_artifacts.append({
                    "taskId": {
                        "task-reference": "<repackage>"
                    },
                    "taskType":
                    "repackage",
                    "paths": [
                        get_artifact_path(
                            dep_job,
                            "{}/target.stub-installer.exe".format(repack_id)),
                    ],
                    "formats": ["autograph_authenticode", "autograph_gpg"]
                })
        elif 'mac' in build_platform:
            upstream_artifacts = [{
                "taskId": {
                    "task-reference": "<repackage>"
                },
                "taskType":
                "repackage",
                "paths": [
                    get_artifact_path(dep_job,
                                      "{}/target.dmg".format(repack_id)),
                ],
                "formats": ["autograph_gpg"]
            }]
        elif 'linux' in build_platform:
            upstream_artifacts = [{
                "taskId": {
                    "task-reference": "<repack>"
                },
                "taskType":
                "repackage",
                "paths": [
                    get_artifact_path(dep_job,
                                      "{}/target.tar.bz2".format(repack_id)),
                ],
                "formats": ["autograph_gpg"]
            }]

        task = {
            'label': label,
            'description': description,
            'worker-type': 'linux-signing',
            'worker': {
                'implementation': 'scriptworker-signing',
                'upstream-artifacts': upstream_artifacts,
                'max-run-time': 3600
            },
            'scopes': scopes,
            'dependencies': dependencies,
            'attributes': attributes,
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'extra': {
                'repack_id': repack_id,
            }
        }
        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get('priority'):
            task['priority'] = job['priority']

        yield task
Example 16
def mozharness_test_on_docker(config, job, taskdesc):
    run = job['run']
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker'] = job['worker']

    # apply some defaults
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']
    if 'android-em-7.0-x86' in test['test-platform']:
        worker['privileged'] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "{workdir}/workspace/logs/".format(**run)),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        ("public/test_info/",
         "{workdir}/workspace/build/blobber_upload_dir/".format(**run)),
    ]

    if 'installer-url' in mozharness:
        installer_url = mozharness['installer-url']
    else:
        installer_url = get_artifact_url('<build>',
                                         mozharness['build-artifact-name'])

    mozharness_url = get_artifact_url(
        '<build>', get_artifact_path(taskdesc, 'mozharness.zip'))

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('{workdir}/workspace'.format(**run), path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {
            'task-reference': installer_url
        },
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
        'NEED_COMPIZ': 'true',
        'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
        'WORKING_DIR': '/builds/worker',
    })

    # by default, require compiz unless proven otherwise, hence a whitelist.
    # See https://bugzilla.mozilla.org/show_bug.cgi?id=1552563
    # if using regex this list can be shortened greatly.
    suites_not_need_compiz = [
        'mochitest-webgl1-core', 'mochitest-webgl1-ext', 'mochitest-plain-gpu',
        'mochitest-browser-chrome-screenshots', 'gtest', 'cppunittest',
        'jsreftest', 'crashtest', 'reftest', 'reftest-no-accel',
        'web-platform-tests', 'web-platform-tests-reftests', 'xpcshell'
    ]
    if job['run']['test']['suite'] in suites_not_need_compiz or (
            job['run']['test']['suite'] == 'mochitest-plain-chunked'
            and job['run']['test']['try-name'] == 'mochitest-plain-headless'):
        env['NEED_COMPIZ'] = 'false'

    if mozharness.get('mochitest-flavor'):
        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options
    if test['reboot']:
        raise Exception('reboot: {} not supported on generic-worker'.format(
            test['reboot']))

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        env['MOZHARNESS_PATH'] = '{workdir}/checkouts/gecko/testing/mozharness'.format(
            **run)
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    extra_config = {
        'installer_url': installer_url,
        'test_packages_url': test_packages_url(taskdesc),
    }
    env['EXTRA_MOZHARNESS_CONFIG'] = {
        'task-reference': json.dumps(extra_config)
    }

    command = [
        '{workdir}/bin/test-linux.sh'.format(**run),
    ]
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        command.append('--total-chunk={}'.format(test['chunks']))
        command.append('--this-chunk={}'.format(test['this-chunk']))

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        download_symbols = {
            True: 'true',
            False: 'false'
        }.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    job['run'] = {
        'workdir': run['workdir'],
        'tooltool-downloads': mozharness['tooltool-downloads'],
        'checkout': test['checkout'],
        'command': command,
        'using': 'run-task',
    }
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
Example 17
def mozharness_test_on_generic_worker(config, job, taskdesc):
    run = job['run']
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker'] = job['worker']

    bitbar_script = 'test-linux.sh'

    is_macosx = worker['os'] == 'macosx'
    is_windows = worker['os'] == 'windows'
    is_linux = worker['os'] == 'linux' or worker['os'] == 'linux-bitbar'
    is_bitbar = worker['os'] == 'linux-bitbar'
    assert is_macosx or is_windows or is_linux

    artifacts = [
        {
            'name': 'public/logs',
            'path': 'logs',
            'type': 'directory'
        },
    ]

    # jittest doesn't have blob_upload_dir
    if test['test-name'] != 'jittest':
        artifacts.append({
            'name': 'public/test_info',
            'path': 'build/blobber_upload_dir',
            'type': 'directory'
        })

    if is_bitbar:
        artifacts = [
            {
                'name': 'public/test/',
                'path': 'artifacts/public',
                'type': 'directory'
            },
            {
                'name': 'public/logs/',
                'path': 'workspace/logs',
                'type': 'directory'
            },
            {
                'name': 'public/test_info/',
                'path': 'workspace/build/blobber_upload_dir',
                'type': 'directory'
            },
        ]

    if 'installer-url' in mozharness:
        installer_url = mozharness['installer-url']
    else:
        upstream_task = '<build-signing>' if mozharness[
            'requires-signed-builds'] else '<build>'
        installer_url = get_artifact_url(upstream_task,
                                         mozharness['build-artifact-name'])

    worker['os-groups'] = test['os-groups']

    # run-as-administrator is a feature for workers with UAC enabled and as such should not be
    # included in tasks on workers that have UAC disabled. Currently UAC is only enabled on
    # gecko Windows 10 workers, however this may be subject to change. Worker type
    # environment definitions can be found in https://github.com/mozilla-releng/OpenCloudConfig
    # See https://docs.microsoft.com/en-us/windows/desktop/secauthz/user-account-control
    # for more information about UAC.
    if test.get('run-as-administrator', False):
        if job['worker-type'].startswith('t-win10-64'):
            worker['run-as-administrator'] = True
        else:
            raise Exception('run-as-administrator not supported on {}'.format(
                job['worker-type']))

    if test['reboot']:
        raise Exception('reboot: {} not supported on generic-worker'.format(
            test['reboot']))

    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']
    worker['artifacts'] = artifacts

    env = worker.setdefault('env', {})
    env['GECKO_HEAD_REPOSITORY'] = config.params['head_repository']
    env['GECKO_HEAD_REV'] = config.params['head_rev']

    # this list will get cleaned up / reduced / removed in bug 1354088
    if is_macosx:
        env.update({
            'IDLEIZER_DISABLE_SHUTDOWN': 'true',
            'LANG': 'en_US.UTF-8',
            'LC_ALL': 'en_US.UTF-8',
            'MOZ_HIDE_RESULTS_TABLE': '1',
            'MOZ_NODE_PATH': '/usr/local/bin/node',
            'MOZ_NO_REMOTE': '1',
            'NO_FAIL_ON_TEST_ERRORS': '1',
            'PATH': '/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin',
            'SHELL': '/bin/bash',
            'XPCOM_DEBUG_BREAK': 'warn',
            'XPC_FLAGS': '0x0',
            'XPC_SERVICE_NAME': '0',
        })
    elif is_bitbar:
        env.update({
            'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
            'MOZHARNESS_SCRIPT': mozharness['script'],
            'MOZHARNESS_URL': {
                'artifact-reference': '<build/public/build/mozharness.zip>'
            },
            'MOZILLA_BUILD_URL': {
                'task-reference': installer_url
            },
            "MOZ_NO_REMOTE": '1',
            "NEED_XVFB": "false",
            "XPCOM_DEBUG_BREAK": 'warn',
            "NO_FAIL_ON_TEST_ERRORS": '1',
            "MOZ_HIDE_RESULTS_TABLE": '1',
            "MOZ_NODE_PATH": "/usr/local/bin/node",
            'TASKCLUSTER_WORKER_TYPE': job['worker-type'],
        })

    extra_config = {
        'installer_url': installer_url,
        'test_packages_url': test_packages_url(taskdesc),
    }
    env['EXTRA_MOZHARNESS_CONFIG'] = {
        'task-reference': json.dumps(extra_config)
    }

    if is_windows:
        mh_command = [
            'c:\\mozilla-build\\python\\python.exe', '-u',
            'mozharness\\scripts\\' + normpath(mozharness['script'])
        ]
    elif is_bitbar:
        mh_command = ['bash', "./{}".format(bitbar_script)]
    elif is_macosx and 'macosx1014-64' in test['test-platform']:
        mh_command = [
            '/usr/local/bin/python2', '-u',
            'mozharness/scripts/' + mozharness['script']
        ]
    else:
        # is_linux or is_macosx
        mh_command = [
            # Using /usr/bin/python2.7 rather than python2.7 because
            # /usr/local/bin/python2.7 is broken on the mac workers.
            # See bug #1547903.
            '/usr/bin/python2.7',
            '-u',
            'mozharness/scripts/' + mozharness['script']
        ]

    for mh_config in mozharness['config']:
        cfg_path = 'mozharness/configs/' + mh_config
        if is_windows:
            cfg_path = normpath(cfg_path)
        mh_command.extend(['--cfg', cfg_path])
    mh_command.extend(mozharness.get('extra-options', []))
    if mozharness.get('download-symbols'):
        if isinstance(mozharness['download-symbols'], basestring):
            mh_command.extend(
                ['--download-symbols', mozharness['download-symbols']])
        else:
            mh_command.extend(['--download-symbols', 'true'])
    if mozharness.get('include-blob-upload-branch'):
        mh_command.append('--blob-upload-branch=' + config.params['project'])

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        mh_command.append('--total-chunk={}'.format(test['chunks']))
        mh_command.append('--this-chunk={}'.format(test['this-chunk']))

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    worker['mounts'] = [{
        'directory': '.',
        'content': {
            'artifact': get_artifact_path(taskdesc, 'mozharness.zip'),
            'task-id': {
                'task-reference': '<build>'
            }
        },
        'format': 'zip'
    }]
    if is_bitbar:
        a_url = config.params.file_url(
            'taskcluster/scripts/tester/{}'.format(bitbar_script))
        worker['mounts'] = [{
            'file': bitbar_script,
            'content': {
                'url': a_url,
            },
        }]

    job['run'] = {
        'workdir': run['workdir'],
        'tooltool-downloads': mozharness['tooltool-downloads'],
        'checkout': test['checkout'],
        'command': mh_command,
        'using': 'run-task',
    }
    if is_bitbar:
        job['run']['run-as-root'] = True
        # FIXME: The bitbar config incorrectly requests internal tooltool downloads
        # so force it off here.
        job['run']['tooltool-downloads'] = False
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
Example 18
def mozharness_test_on_docker(config, job, taskdesc):
    run = job["run"]
    test = taskdesc["run"]["test"]
    mozharness = test["mozharness"]
    worker = taskdesc["worker"] = job["worker"]

    # apply some defaults
    worker["docker-image"] = test["docker-image"]
    worker["allow-ptrace"] = True  # required for all tests, for crashreporter
    worker["loopback-video"] = test["loopback-video"]
    worker["loopback-audio"] = test["loopback-audio"]
    worker["max-run-time"] = test["max-run-time"]
    worker["retry-exit-status"] = test["retry-exit-status"]
    if "android-em-7.0-x86" in test["test-platform"]:
        worker["privileged"] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "{workdir}/workspace/logs/".format(**run)),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        (
            "public/test_info/",
            "{workdir}/workspace/build/blobber_upload_dir/".format(**run),
        ),
    ]

    installer = installer_url(taskdesc)

    mozharness_url = get_artifact_url(
        "<build>", get_artifact_path(taskdesc, "mozharness.zip"))

    worker.setdefault("artifacts", [])
    worker["artifacts"].extend([{
        "name":
        prefix,
        "path":
        os.path.join("{workdir}/workspace".format(**run), path),
        "type":
        "directory",
    } for (prefix, path) in artifacts])

    env = worker.setdefault("env", {})
    env.update({
        "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
        "MOZHARNESS_SCRIPT": mozharness["script"],
        "MOZILLA_BUILD_URL": {
            "task-reference": installer
        },
        "NEED_PULSEAUDIO": "true",
        "NEED_WINDOW_MANAGER": "true",
        "ENABLE_E10S": text_type(bool(test.get("e10s"))).lower(),
        "WORKING_DIR": "/builds/worker",
    })

    if test.get("python-3"):
        env["PYTHON"] = "python3"

    # Legacy linux64 tests rely on compiz.
    if test.get("docker-image", {}).get("in-tree") == "desktop1604-test":
        env.update({"NEED_COMPIZ": "true"})

    # Bug 1602701/1601828 - use compiz on ubuntu1804 due to GTK asynchiness
    # when manipulating windows.
    if test.get("docker-image", {}).get("in-tree") == "ubuntu1804-test":
        if "wdspec" in job["run"]["test"]["suite"] or (
                "marionette" in job["run"]["test"]["suite"]
                and "headless" not in job["label"]):
            env.update({"NEED_COMPIZ": "true"})

    if mozharness.get("mochitest-flavor"):
        env["MOCHITEST_FLAVOR"] = mozharness["mochitest-flavor"]

    if mozharness["set-moz-node-path"]:
        env["MOZ_NODE_PATH"] = "/usr/local/bin/node"

    if "actions" in mozharness:
        env["MOZHARNESS_ACTIONS"] = " ".join(mozharness["actions"])

    if config.params.is_try():
        env["TRY_COMMIT_MSG"] = config.params["message"]

    # handle some of the mozharness-specific options
    if test["reboot"]:
        raise Exception("reboot: {} not supported on generic-worker".format(
            test["reboot"]))

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test["checkout"]:
        env["MOZHARNESS_PATH"] = "{workdir}/checkouts/gecko/testing/mozharness".format(
            **run)
    else:
        env["MOZHARNESS_URL"] = {"task-reference": mozharness_url}

    extra_config = {
        "installer_url": installer,
        "test_packages_url": test_packages_url(taskdesc),
    }
    env["EXTRA_MOZHARNESS_CONFIG"] = {
        "task-reference":
        six.ensure_text(json.dumps(extra_config, sort_keys=True))
    }

    # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
    # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
    if "web-platform-tests" in test["suite"] or re.match(
            "test-(coverage|verify)-wpt", test["suite"]):
        env["TESTS_BY_MANIFEST_URL"] = {
            "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
        }

    command = [
        "{workdir}/bin/test-linux.sh".format(**run),
    ]
    command.extend(mozharness.get("extra-options", []))

    if test.get("test-manifests"):
        env["MOZHARNESS_TEST_PATHS"] = six.ensure_text(
            json.dumps({test["suite"]: test["test-manifests"]},
                       sort_keys=True))

    # TODO: remove the need for run['chunked']
    elif mozharness.get("chunked") or test["chunks"] > 1:
        command.append("--total-chunk={}".format(test["chunks"]))
        command.append("--this-chunk={}".format(test["this-chunk"]))

    if "download-symbols" in mozharness:
        download_symbols = mozharness["download-symbols"]
        download_symbols = {
            True: "true",
            False: "false"
        }.get(download_symbols, download_symbols)
        command.append("--download-symbols=" + download_symbols)

    job["run"] = {
        "workdir": run["workdir"],
        "tooltool-downloads": mozharness["tooltool-downloads"],
        "checkout": test["checkout"],
        "command": command,
        "using": "run-task",
    }
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
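
Note on the encoding used above: values that carry structured data (EXTRA_MOZHARNESS_CONFIG, MOZHARNESS_TEST_PATHS) are JSON-serialized before landing in the worker environment, and anything that points at another task is wrapped in a task-reference object so taskgraph can substitute the real task ID at submission time. A minimal standalone sketch of that encoding, with made-up URLs and suite names:

import json

import six

# Hypothetical stand-ins for installer_url(taskdesc), test_packages_url(taskdesc)
# and the test definition -- not real values.
installer = "https://example.invalid/task/<build>/artifacts/public/build/target.tar.bz2"
test_packages = "https://example.invalid/task/<build>/artifacts/public/build/target.test_packages.json"
suite = "mochitest-plain"
manifests = ["dom/base/test/mochitest.ini"]

env = {}

# sort_keys=True keeps the serialized value deterministic from run to run.
extra_config = {"installer_url": installer, "test_packages_url": test_packages}
env["EXTRA_MOZHARNESS_CONFIG"] = {
    "task-reference": six.ensure_text(json.dumps(extra_config, sort_keys=True)),
}
env["MOZHARNESS_TEST_PATHS"] = six.ensure_text(
    json.dumps({suite: manifests}, sort_keys=True))

print(env["EXTRA_MOZHARNESS_CONFIG"]["task-reference"])
print(env["MOZHARNESS_TEST_PATHS"])
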
Example 19
def mozharness_test_on_generic_worker(config, job, taskdesc):
    test = taskdesc["run"]["test"]
    mozharness = test["mozharness"]
    worker = taskdesc["worker"] = job["worker"]

    bitbar_script = "test-linux.sh"

    is_macosx = worker["os"] == "macosx"
    is_windows = worker["os"] == "windows"
    is_linux = worker["os"] == "linux" or worker["os"] == "linux-bitbar"
    is_bitbar = worker["os"] == "linux-bitbar"
    assert is_macosx or is_windows or is_linux

    artifacts = [
        {
            "name": "public/logs",
            "path": "logs",
            "type": "directory"
        },
    ]

    # jittest doesn't have blob_upload_dir
    if test["test-name"] != "jittest":
        artifacts.append({
            "name": "public/test_info",
            "path": "build/blobber_upload_dir",
            "type": "directory",
        })

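    # Bitbar devices lay out their workspace differently, so the artifact
    # list is replaced wholesale below rather than extended.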
    if is_bitbar:
        artifacts = [
            {
                "name": "public/test/",
                "path": "artifacts/public",
                "type": "directory"
            },
            {
                "name": "public/logs/",
                "path": "workspace/logs",
                "type": "directory"
            },
            {
                "name": "public/test_info/",
                "path": "workspace/build/blobber_upload_dir",
                "type": "directory",
            },
        ]

    installer = installer_url(taskdesc)

    worker["os-groups"] = test["os-groups"]

    # run-as-administrator is a feature for workers with UAC enabled and as such should not be
    # included in tasks on workers that have UAC disabled. Currently UAC is only enabled on
    # gecko Windows 10 workers; however, this may be subject to change. Worker type
    # environment definitions can be found in https://github.com/mozilla-releng/OpenCloudConfig.
    # See https://docs.microsoft.com/en-us/windows/desktop/secauthz/user-account-control
    # for more information about UAC.
    if test.get("run-as-administrator", False):
        if job["worker-type"].startswith("t-win10-64"):
            worker["run-as-administrator"] = True
        else:
            raise Exception("run-as-administrator not supported on {}".format(
                job["worker-type"]))

    if test["reboot"]:
        raise Exception("reboot: {} not supported on generic-worker".format(
            test["reboot"]))

    worker["max-run-time"] = test["max-run-time"]
    worker["retry-exit-status"] = test["retry-exit-status"]
    worker.setdefault("artifacts", [])
    worker["artifacts"].extend(artifacts)

    env = worker.setdefault("env", {})
    env["GECKO_HEAD_REPOSITORY"] = config.params["head_repository"]
    env["GECKO_HEAD_REV"] = config.params["head_rev"]

    # this list will get cleaned up / reduced / removed in bug 1354088
    if is_macosx:
        env.update({
            "LC_ALL": "en_US.UTF-8",
            "LANG": "en_US.UTF-8",
            "MOZ_NODE_PATH": "/usr/local/bin/node",
            "PATH": "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin",
            "SHELL": "/bin/bash",
        })
    elif is_bitbar:
        env.update({
            "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
            "MOZHARNESS_SCRIPT": mozharness["script"],
            "MOZHARNESS_URL": {
                "artifact-reference": "<build/public/build/mozharness.zip>"
            },
            "MOZILLA_BUILD_URL": {
                "task-reference": installer
            },
            "MOZ_NO_REMOTE": "1",
            "NEED_XVFB": "false",
            "XPCOM_DEBUG_BREAK": "warn",
            "NO_FAIL_ON_TEST_ERRORS": "1",
            "MOZ_HIDE_RESULTS_TABLE": "1",
            "MOZ_NODE_PATH": "/usr/local/bin/node",
            "TASKCLUSTER_WORKER_TYPE": job["worker-type"],
        })

    extra_config = {
        "installer_url": installer,
        "test_packages_url": test_packages_url(taskdesc),
    }
    env["EXTRA_MOZHARNESS_CONFIG"] = {
        "task-reference":
        six.ensure_text(json.dumps(extra_config, sort_keys=True))
    }

    # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
    # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
    if "web-platform-tests" in test["suite"] or re.match(
            "test-(coverage|verify)-wpt", test["suite"]):
        env["TESTS_BY_MANIFEST_URL"] = {
            "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
        }

    py_3 = test.get("python-3", False)

    if is_windows:
        py_binary = "c:\\mozilla-build\\{python}\\{python}.exe".format(
            python="python3" if py_3 else "python")
        mh_command = [
            py_binary,
            "-u",
            "mozharness\\scripts\\" + normpath(mozharness["script"]),
        ]
    elif is_bitbar:
        py_binary = "python3" if py_3 else "python"
        mh_command = ["bash", "./{}".format(bitbar_script)]
    elif is_macosx and "macosx1014-64" in test["test-platform"]:
        py_binary = "/usr/local/bin/{}".format(
            "python3" if py_3 else "python2")
        mh_command = [
            py_binary,
            "-u",
            "mozharness/scripts/" + mozharness["script"],
        ]
    else:
        # is_linux or is_macosx
        py_binary = "/usr/bin/{}".format("python3" if py_3 else "python2")
        mh_command = [
            # Using /usr/bin/python2.7 rather than python2.7 because
            # /usr/local/bin/python2.7 is broken on the mac workers.
            # See bug #1547903.
            py_binary,
            "-u",
            "mozharness/scripts/" + mozharness["script"],
        ]

    if py_3:
        env["PYTHON"] = py_binary

    for mh_config in mozharness["config"]:
        cfg_path = "mozharness/configs/" + mh_config
        if is_windows:
            cfg_path = normpath(cfg_path)
        mh_command.extend(["--cfg", cfg_path])
    mh_command.extend(mozharness.get("extra-options", []))
    if mozharness.get("download-symbols"):
        if isinstance(mozharness["download-symbols"], text_type):
            mh_command.extend(
                ["--download-symbols", mozharness["download-symbols"]])
        else:
            mh_command.extend(["--download-symbols", "true"])
    if mozharness.get("include-blob-upload-branch"):
        mh_command.append("--blob-upload-branch=" + config.params["project"])

    if test.get("test-manifests"):
        env["MOZHARNESS_TEST_PATHS"] = six.ensure_text(
            json.dumps({test["suite"]: test["test-manifests"]},
                       sort_keys=True))

    # TODO: remove the need for run['chunked']
    elif mozharness.get("chunked") or test["chunks"] > 1:
        mh_command.append("--total-chunk={}".format(test["chunks"]))
        mh_command.append("--this-chunk={}".format(test["this-chunk"]))

    if config.params.is_try():
        env["TRY_COMMIT_MSG"] = config.params["message"]

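    # generic-worker "mounts" fetch mozharness.zip from the build task and
    # unpack it into a mozharness/ directory before the command runs.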
    worker["mounts"] = [{
        "directory": "mozharness",
        "content": {
            "artifact": get_artifact_path(taskdesc, "mozharness.zip"),
            "task-id": {
                "task-reference": "<build>"
            },
        },
        "format": "zip",
    }]
    if is_bitbar:
        a_url = config.params.file_url(
            "taskcluster/scripts/tester/{}".format(bitbar_script))
        worker["mounts"] = [{
            "file": bitbar_script,
            "content": {
                "url": a_url,
            },
        }]

    job["run"] = {
        "tooltool-downloads": mozharness["tooltool-downloads"],
        "checkout": test["checkout"],
        "command": mh_command,
        "using": "run-task",
    }
    if is_bitbar:
        job["run"]["run-as-root"] = True
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
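
The per-OS branching above that picks the Python binary and the mozharness invocation is easier to see in isolation. A condensed sketch of the same dispatch follows; the helper name and its arguments are illustrative, not part of the transform, and ntpath.normpath stands in for the normpath used above so Windows-style separators come out right on any host:

from ntpath import normpath


def pick_mozharness_command(os_name, test_platform, script, py_3,
                            bitbar_script="test-linux.sh"):
    """Mirror the per-OS dispatch above; purely illustrative."""
    if os_name == "windows":
        py_binary = "c:\\mozilla-build\\{python}\\{python}.exe".format(
            python="python3" if py_3 else "python")
        return py_binary, [py_binary, "-u",
                           "mozharness\\scripts\\" + normpath(script)]
    if os_name == "linux-bitbar":
        # bitbar runs the wrapper script; the interpreter name only feeds env["PYTHON"]
        return ("python3" if py_3 else "python"), ["bash", "./" + bitbar_script]
    if os_name == "macosx" and "macosx1014-64" in test_platform:
        py_binary = "/usr/local/bin/" + ("python3" if py_3 else "python2")
    else:  # remaining linux and macosx workers
        py_binary = "/usr/bin/" + ("python3" if py_3 else "python2")
    return py_binary, [py_binary, "-u", "mozharness/scripts/" + script]


print(pick_mozharness_command("windows", "windows10-64", "desktop_unittest.py", py_3=True))
print(pick_mozharness_command("linux", "linux1804-64", "desktop_unittest.py", py_3=False))
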
Example 20
def test_packages_url(taskdesc):
    """Account for different platforms that name their test packages differently"""
    return get_artifact_url(
        '<build>', get_artifact_path(taskdesc, 'target.test_packages.json'))
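
Both helpers used here live in taskgraph's util modules: get_artifact_path prefixes the artifact name with the build's artifact directory, and get_artifact_url combines a task-ID placeholder such as <build> with that path into a queue URL; the placeholder is resolved by task-reference substitution when the graph is submitted. A rough stand-in to show the shape of the result (the prefix and URL template below are assumptions for illustration, not the real helpers):

# Simplified stand-ins for the taskgraph.util helpers -- illustrative only.
ASSUMED_ARTIFACT_PREFIX = 'public/build'
ASSUMED_QUEUE_ROOT = 'https://firefox-ci-tc.services.mozilla.com/api/queue/v1'


def fake_get_artifact_path(taskdesc, name):
    # The real helper also honours a per-task artifact-prefix attribute.
    return '{}/{}'.format(ASSUMED_ARTIFACT_PREFIX, name)


def fake_get_artifact_url(task_id, path):
    # task_id may be a literal ID or a '<label>' placeholder that taskgraph's
    # task-reference machinery later replaces with the dependency's task ID.
    return '{}/task/{}/artifacts/{}'.format(ASSUMED_QUEUE_ROOT, task_id, path)


print(fake_get_artifact_url(
    '<build>', fake_get_artifact_path({}, 'target.test_packages.json')))
# .../task/<build>/artifacts/public/build/target.test_packages.json
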
Example 21
def mozharness_test_on_docker(config, job, taskdesc):
    run = job['run']
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker']

    # apply some defaults
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']
    if 'android-em-7.0-x86' in test['test-platform']:
        worker['privileged'] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "{workdir}/workspace/build/upload/logs/".format(**run)
         ),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        ("public/test_info/",
         "{workdir}/workspace/build/blobber_upload_dir/".format(**run)),
    ]

    installer_url = get_artifact_url('<build>',
                                     mozharness['build-artifact-name'])
    mozharness_url = get_artifact_url(
        '<build>', get_artifact_path(taskdesc, 'mozharness.zip'))

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('{workdir}/workspace'.format(**run), path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    worker['caches'] = [{
        'type': 'persistent',
        'name': 'level-{}-{}-test-workspace'.format(
            config.params['level'], config.params['project']),
        'mount-point': "{workdir}/workspace".format(**run),
    }]

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {
            'task-reference': installer_url
        },
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
        'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
        'MOZ_AUTOMATION': '1',
        'WORKING_DIR': '/builds/worker',
    })

    if mozharness.get('mochitest-flavor'):
        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options

    if mozharness['tooltool-downloads']:
        docker_worker_add_tooltool(config, job, taskdesc, internal=True)

    if test['reboot']:
        raise Exception('reboot: {} not supported on generic-worker'.format(
            test['reboot']))

    # assemble the command line
    command = [
        '{workdir}/bin/run-task'.format(**run),
    ]

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        command.extend(
            ['--vcs-checkout', '{workdir}/checkouts/gecko'.format(**run)])
        env['MOZHARNESS_PATH'] = '{workdir}/checkouts/gecko/testing/mozharness'.format(
            **run)
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    command.extend([
        '--',
        '{workdir}/bin/test-linux.sh'.format(**run),
    ])

    command.extend([
        {"task-reference": "--installer-url=" + installer_url},
        {"task-reference":
         "--test-packages-url=" + test_packages_url(taskdesc)},
    ])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            suffix = mozharness['chunk-suffix'].replace(
                '<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(command):
                # Note: `basestring` is Python 2-only; a Python 3 port would
                # check against str (or six.string_types) instead.
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        download_symbols = {
            True: 'true',
            False: 'false'
        }.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    worker['command'] = command
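
The 'test-suite-suffix' branch above rewrites the already-assembled command in place: it expands the <CHUNK> marker in the configured suffix and appends the result to whichever argument starts with --test-suite. A small standalone illustration (the suite name and suffix are made up; plain str replaces the Python 2 basestring check):

command = ['--test-suite=mochitest-chrome', '--download-symbols=true']
chunk_suffix = '-<CHUNK>'   # hypothetical mozharness['chunk-suffix']
this_chunk = 3

suffix = chunk_suffix.replace('<CHUNK>', str(this_chunk))
for i, c in enumerate(command):
    if isinstance(c, str) and c.startswith('--test-suite'):
        command[i] += suffix

print(command)
# ['--test-suite=mochitest-chrome-3', '--download-symbols=true']
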
Example 22
def mozharness_test_on_docker(config, job, taskdesc):
    run = job['run']
    test = taskdesc['run']['test']
    mozharness = test['mozharness']
    worker = taskdesc['worker'] = job['worker']

    # apply some defaults
    worker['docker-image'] = test['docker-image']
    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']
    if 'android-em-7.0-x86' in test['test-platform']:
        worker['privileged'] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "{workdir}/workspace/logs/".format(**run)),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        ("public/test_info/",
         "{workdir}/workspace/build/blobber_upload_dir/".format(**run)),
    ]

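    # Prefer an explicit installer-url from the test definition; otherwise
    # derive the URL from the build task's artifact.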
    if 'installer-url' in mozharness:
        installer_url = mozharness['installer-url']
    else:
        installer_url = get_artifact_url('<build>',
                                         mozharness['build-artifact-name'])

    mozharness_url = get_artifact_url(
        '<build>', get_artifact_path(taskdesc, 'mozharness.zip'))

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('{workdir}/workspace'.format(**run), path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    env = worker.setdefault('env', {})
    env.update({
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {
            'task-reference': installer_url
        },
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
        'ENABLE_E10S': text_type(bool(test.get('e10s'))).lower(),
        'WORKING_DIR': '/builds/worker',
    })

    # Legacy linux64 tests rely on compiz.
    if test.get('docker-image', {}).get('in-tree') == 'desktop1604-test':
        env.update({'NEED_COMPIZ': 'true'})

    # Bug 1602701/1601828 - use compiz on ubuntu1804 due to GTK asynchrony
    # when manipulating windows.
    if test.get('docker-image', {}).get('in-tree') == 'ubuntu1804-test':
        if ('wdspec' in job['run']['test']['suite']
                or ('marionette' in job['run']['test']['suite']
                    and 'headless' not in job['label'])):
            env.update({'NEED_COMPIZ': 'true'})

    if mozharness.get('mochitest-flavor'):
        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params.is_try():
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options
    if test['reboot']:
        raise Exception('reboot: {} not supported on generic-worker'.format(
            test['reboot']))

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        env['MOZHARNESS_PATH'] = '{workdir}/checkouts/gecko/testing/mozharness'.format(
            **run)
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    extra_config = {
        'installer_url': installer_url,
        'test_packages_url': test_packages_url(taskdesc),
    }
    env['EXTRA_MOZHARNESS_CONFIG'] = {
        'task-reference': six.ensure_text(json.dumps(extra_config))
    }

    command = [
        '{workdir}/bin/test-linux.sh'.format(**run),
    ]
    command.extend(mozharness.get('extra-options', []))

    if test.get('test-manifests'):
        env['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
            json.dumps({test['suite']: test['test-manifests']}))

    # TODO: remove the need for run['chunked']
    elif mozharness.get('chunked') or test['chunks'] > 1:
        command.append('--total-chunk={}'.format(test['chunks']))
        command.append('--this-chunk={}'.format(test['this-chunk']))

    if 'download-symbols' in mozharness:
        download_symbols = mozharness['download-symbols']
        download_symbols = {
            True: 'true',
            False: 'false'
        }.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    job['run'] = {
        'workdir': run['workdir'],
        'tooltool-downloads': mozharness['tooltool-downloads'],
        'checkout': test['checkout'],
        'command': command,
        'using': 'run-task',
    }
    configure_taskdesc_for_run(config, job, taskdesc, worker['implementation'])
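
A detail shared by both docker variants is the download-symbols normalization: booleans are mapped to their lowercase string form, while any string value (for example 'ondemand' in the in-tree test definitions) passes through untouched before being appended to the command line. A tiny sketch of that mapping:

def normalize_download_symbols(value):
    # Map True/False to 'true'/'false'; leave string values untouched.
    # Mirrors the inline dict-lookup used in the transforms above.
    return {True: 'true', False: 'false'}.get(value, value)


for value in (True, False, 'ondemand'):
    print('--download-symbols=' + normalize_download_symbols(value))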