Example #1
def test_get_release_props(context, mocker, taskjson, locale, relprops, expected):
    context.task = get_fake_valid_task(taskjson)
    if locale:
        context.task['payload']['locale'] = 'lang'

    context.task['payload']['releaseProperties'] = relprops
    assert get_release_props(context) == (expected, None)

    # also check that the balrog_props method works with the same data
    # TODO: remove the rest of this function once that method is no longer supported
    del context.task['payload']['releaseProperties']

    context.task['payload']['upstreamArtifacts'] = [{
      "locale": "lang",
      "paths": [
        "public/build/lang/balrog_props.json"
      ],
      "taskId": "buildTaskId",
      "taskType": "build"
    }]

    balrog_props_path = os.path.abspath(os.path.join(context.config['work_dir'], 'cot', 'buildTaskId', 'public/build/lang/balrog_props.json'))
    makedirs(os.path.dirname(balrog_props_path))
    with open(balrog_props_path, 'w') as f:
        json.dump({
            'properties': relprops
        }, f)

    assert get_release_props(context) == (expected, balrog_props_path)
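These test excerpts take taskjson, locale, relprops, and expected as arguments but omit the pytest parametrization that supplies them. Below is a minimal sketch of what such a decorator could look like, assuming placeholder filenames and property dicts (the real expected values depend on how get_release_props normalizes the properties):

@pytest.mark.parametrize("taskjson,locale,relprops,expected", (
    # placeholder values; the real suite drives these from fixture files
    ("task.json", False,
     {"appName": "Firefox", "platform": "linux", "branch": "maple"},
     {"appName": "Firefox", "platform": "linux", "branch": "maple"}),
    ("task.json", True,
     {"appName": "Fennec", "platform": "android-api-16", "branch": "maple"},
     {"appName": "Fennec", "platform": "android-api-16", "branch": "maple"}),
))
def test_get_release_props(context, mocker, taskjson, locale, relprops, expected):
    ...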
Example #2
def test_get_release_props(context, mocker, taskjson, locale, relprops,
                           expected):
    context.task = get_fake_valid_task(taskjson)
    if locale:
        context.task["payload"]["locale"] = "lang"

    context.task["payload"]["releaseProperties"] = relprops
    assert get_release_props(context) == expected

    context.task["payload"]["releaseProperties"] = None
    with pytest.raises(ScriptWorkerTaskException):
        get_release_props(context)
Example #3
async def push_to_maven(context):
    """Push artifacts to maven locations expected by clients such as mvn/gradle)

    XXX: This function handles the transfer of artifacts to the maven.mozilla.org repository.
    The artifacts being published come from different projects and are all handled here. For
    example, as of Q4 2020, we use this function for beetmover releases coming from
    Android-Components, Glean, and Application-Services, but also for in-tree GeckoView.

    This wide variety of beetmover jobs usually implies complex checks to cover
    all possible corner cases. For example, it needs to handle both MavenVersion for
    GitHub projects and FirefoxVersion for in-tree GeckoView releases.
    """
    context.release_props = get_release_props(context)
    context.checksums = dict()  # Needed by downstream calls
    context.raw_balrog_manifest = dict()  # Needed by downstream calls

    # Version validation
    version = task.get_maven_version(context)
    task.check_maven_artifact_map(context, version)

    # overwrite artifacts_to_beetmove with the declarative artifacts ones
    context.artifacts_to_beetmove = task.get_upstream_artifacts(
        context, preserve_full_paths=True)
    await move_beets(context,
                     context.artifacts_to_beetmove,
                     artifact_map=context.task["payload"]["artifactMap"])
Example #4
async def push_to_maven(context):
    """Push artifacts to locations expected by maven clients (like mvn or gradle)"""
    artifacts_to_beetmove = task.get_upstream_artifacts_with_zip_extract_param(context)
    context.release_props = get_release_props(context)
    context.checksums = dict()  # Needed by downstream calls
    context.raw_balrog_manifest = dict()  # Needed by downstream calls

    if context.task['payload'].get('artifactMap'):
        context.artifacts_to_beetmove = _extract_and_check_maven_artifacts_to_beetmove(
            artifacts_to_beetmove,
            context.config.get('zip_max_file_size_in_mb', DEFAULT_ZIP_MAX_FILE_SIZE_IN_MB),
            artifact_map=context.task['payload'].get('artifactMap')
        )

        await move_beets(context, context.artifacts_to_beetmove, artifact_map=context.task['payload']['artifactMap'])
    else:
        mapping_manifest = generate_beetmover_manifest(context)
        validate_bucket_paths(context.bucket, mapping_manifest['s3_bucket_path'])

        context.artifacts_to_beetmove = _extract_and_check_maven_artifacts_to_beetmove(
            artifacts_to_beetmove,
            context.config.get('zip_max_file_size_in_mb', DEFAULT_ZIP_MAX_FILE_SIZE_IN_MB),
            mapping_manifest=mapping_manifest
        )

        await move_beets(context, context.artifacts_to_beetmove, manifest=mapping_manifest)
Example #5
def test_get_destination_for_partner_repack_path(context, full_path, expected,
                                                 bucket, raises, locale):
    context.bucket = bucket
    context.action = 'push-to-partner'
    context.task['payload']['build_number'] = 99
    context.task['payload']['version'] = '9999.0'
    context.task['payload']['releaseProperties'] = {
        "appName": "Firefox",
        "buildid": "20180328233904",
        "appVersion": "9999.0",
        "hashType": "sha512",
        "platform": "linux",
        "branch": "maple"
    }
    # hack in locale
    for artifact_dict in context.task['payload']['upstreamArtifacts']:
        artifact_dict['locale'] = locale
    context.artifacts_to_beetmove = get_upstream_artifacts(
        context, preserve_full_paths=True)
    context.release_props = get_release_props(context)
    mapping_manifest = generate_beetmover_manifest(context)

    if raises:
        context.action = 'push-to-dummy'
        with pytest.raises(ScriptWorkerRetryException):
            get_destination_for_partner_repack_path(context, mapping_manifest,
                                                    full_path, locale)
    else:
        assert expected == get_destination_for_partner_repack_path(
            context, mapping_manifest, full_path, locale)
Example #6
async def push_to_nightly(context):
    """Push artifacts to a certain location (e.g. nightly/ or candidates/).

    Determine the list of artifacts to be transferred, generate the
    mapping manifest, run some data validations, and upload the bits.

    Upon successful transfer, generate checksums files and manifests to be
    consumed downstream by balrogworkers."""
    context.release_props = get_release_props(context)

    # balrog_manifest is written and uploaded as an artifact which is used by
    # a subsequent balrogworker task in the release graph. Balrogworker uses
    # this manifest to submit release blob info (e.g. mar filename, size, etc)
    context.balrog_manifest = list()

    # Used as a staging area to generate balrog_manifest, so that all the
    # completes and partials for a release end up in the same data structure
    context.raw_balrog_manifest = dict()

    # the checksums manifest is written and uploaded as an artifact which is
    # used by a subsequent signing task and again by another beetmover task to
    # upload it to S3
    context.checksums = dict()

    # TODO: if artifactMap passes schema validation
    if context.task['payload'].get('artifactMap'):
        # determine artifacts to beetmove
        context.artifacts_to_beetmove = get_upstream_artifacts(
            context, preserve_full_paths=True)
        await move_beets(context,
                         context.artifacts_to_beetmove,
                         artifact_map=context.task['payload']['artifactMap'])
    else:
        # determine artifacts to beetmove
        context.artifacts_to_beetmove = get_upstream_artifacts(context)

        # generate beetmover mapping manifest
        mapping_manifest = generate_beetmover_manifest(context)

        # perform another validation check against the bucket path
        validate_bucket_paths(context.bucket,
                              mapping_manifest['s3_bucket_path'])

        # some files to-be-determined via script configs need to have their
        # contents pretty named, so doing it here before even beetmoving begins
        blobs = context.config.get('blobs_needing_prettynaming_contents', [])
        alter_unpretty_contents(context, blobs, mapping_manifest)

        # for each artifact in manifest
        #   a. map each upstream artifact to pretty name release bucket format
        #   b. upload to corresponding S3 location
        await move_beets(context,
                         context.artifacts_to_beetmove,
                         manifest=mapping_manifest)

    #  write balrog_manifest to a file and add it to list of artifacts
    add_balrog_manifest_to_artifacts(context)
    # determine the correct checksum filename and generate it, adding it to
    # the list of artifacts afterwards
    add_checksums_to_artifacts(context)
Example #7
def test_get_release_props(context, mocker, taskjson, locale, relprops,
                           expected):
    context.task = get_fake_valid_task(taskjson)
    if locale:
        context.task['payload']['locale'] = 'lang'

    context.task['payload']['releaseProperties'] = relprops
    assert get_release_props(context) == expected
Example #8
async def push_to_maven(context):
    """Push artifacts to maven locations expected by clients such as mvn/gradle)"""
    context.release_props = get_release_props(context)
    context.checksums = dict()  # Needed by downstream calls
    context.raw_balrog_manifest = dict()  # Needed by downstream calls

    # overwrite artifacts_to_beetmove with the declarative artifacts ones
    context.artifacts_to_beetmove = task.get_upstream_artifacts(context, preserve_full_paths=True)
    await move_beets(context, context.artifacts_to_beetmove, artifact_map=context.task["payload"]["artifactMap"])
Example #9
async def test_move_partner_beets(context, mocker):
    context.artifacts_to_beetmove = get_upstream_artifacts(context, preserve_full_paths=True)
    context.release_props = get_release_props(context)
    mocker.patch("beetmoverscript.utils.JINJA_ENV", get_test_jinja_env())
    mapping_manifest = generate_beetmover_manifest(context)

    mocker.patch.object(beetmoverscript.script, "get_destination_for_partner_repack_path", new=noop_sync)
    mocker.patch.object(beetmoverscript.script, "upload_to_s3", new=noop_async)
    await move_partner_beets(context, mapping_manifest)
Example #10
async def push_to_partner(context):
    """Push private repack artifacts to a certain location. They can be either
    private (to private S3 buckets) or public (going under regular firefox
    bucket).

    Determine the list of artifacts to be transferred, generate the
    mapping manifest and upload the bits."""
    context.artifacts_to_beetmove = get_upstream_artifacts(
        context, preserve_full_paths=True)
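    # Note: in this variant get_release_props() returns a (props, props_file) tuple,
    # as exercised in Example #1; other variants return just the props dict.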
    context.release_props, release_props_file = get_release_props(context)

    mapping_manifest = generate_beetmover_manifest(context)
    await move_partner_beets(context, mapping_manifest)
Example #11
async def push_to_maven(context):
    """Push artifacts to locations expected by maven clients (like mvn or gradle)"""
    context.release_props = get_release_props(context)
    context.checksums = dict()  # Needed by downstream calls
    context.raw_balrog_manifest = dict()  # Needed by downstream calls

    # XXX this is needed in order to avoid having to land the corresponding
    # in-tree patches across all trees at once. Similarly for GitHub projects.
    # This allows a gradual rollout of this change across all projects.
    is_zip_archive = any([
        d.get('zipExtract')
        for d in context.task['payload']['upstreamArtifacts']
    ])
    if context.task['payload'].get('artifactMap'):
        if is_zip_archive:
            artifacts_to_beetmove = task.get_upstream_artifacts_with_zip_extract_param(
                context)
            context.artifacts_to_beetmove = _extract_and_check_maven_artifacts_to_beetmove(
                artifacts_to_beetmove,
                context.config.get('zip_max_file_size_in_mb',
                                   DEFAULT_ZIP_MAX_FILE_SIZE_IN_MB),
                artifact_map=context.task['payload'].get('artifactMap'))
            await move_beets(
                context,
                context.artifacts_to_beetmove,
                artifact_map=context.task['payload']['artifactMap'])
        else:
            # overwrite artifacts_to_beetmove with the declarative artifacts ones
            context.artifacts_to_beetmove = task.get_upstream_artifacts(
                context, preserve_full_paths=True)
            await move_beets(
                context,
                context.artifacts_to_beetmove,
                artifact_map=context.task['payload']['artifactMap'])
    else:
        # TODO: remove this once we're done with migrating from maven.zip
        artifacts_to_beetmove = task.get_upstream_artifacts_with_zip_extract_param(
            context)
        mapping_manifest = generate_beetmover_manifest(context)
        validate_bucket_paths(context.bucket,
                              mapping_manifest['s3_bucket_path'])

        context.artifacts_to_beetmove = _extract_and_check_maven_artifacts_to_beetmove(
            artifacts_to_beetmove,
            context.config.get('zip_max_file_size_in_mb',
                               DEFAULT_ZIP_MAX_FILE_SIZE_IN_MB),
            mapping_manifest=mapping_manifest)

        await move_beets(context,
                         context.artifacts_to_beetmove,
                         manifest=mapping_manifest)
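Example #11 switches on a zipExtract flag in the task's upstreamArtifacts. Below is a minimal sketch of such an entry, with illustrative field values (the taskId, taskType, and archive path are assumptions, not taken from a real task):

upstream_artifact_with_zip_extract = {
    "taskId": "signingTaskId",  # placeholder upstream task id
    "taskType": "signing",
    "paths": ["public/build/target.maven.zip"],  # placeholder archive path
    "zipExtract": True,  # ask beetmover to unpack the archive before moving beets
}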
Example #12
async def push_to_nightly(context):
    """Push artifacts to a certain location (e.g. nightly/ or candidates/).

    Determine the list of artifacts to be transferred, generate the
    mapping manifest, run some data validations, and upload the bits.

    Upon successful transfer, generate checksums files and manifests to be
    consumed downstream by balrogworkers."""
    context.release_props = get_release_props(context)

    # balrog_manifest is written and uploaded as an artifact which is used by
    # a subsequent balrogworker task in the release graph. Balrogworker uses
    # this manifest to submit release blob info (e.g. mar filename, size, etc)
    context.balrog_manifest = list()

    # Used as a staging area to generate balrog_manifest, so that all the
    # completes and partials for a release end up in the same data structure
    context.raw_balrog_manifest = dict()

    # the checksums manifest is written and uploaded as an artifact which is
    # used by a subsequent signing task and again by another beetmover task to
    # upload it to S3
    context.checksums = dict()

    # TODO: if artifactMap passes schema validation
    if context.task["payload"].get("artifactMap"):
        # determine artifacts to beetmove
        context.artifacts_to_beetmove = get_upstream_artifacts(
            context, preserve_full_paths=True)
        await move_beets(context,
                         context.artifacts_to_beetmove,
                         artifact_map=context.task["payload"]["artifactMap"])
    else:
        raise ScriptWorkerTaskException("task payload is missing artifactMap")

    #  write balrog_manifest to a file and add it to list of artifacts
    add_balrog_manifest_to_artifacts(context)
    # determine the correct checksum filename and generate it, adding it to
    # the list of artifacts afterwards
    add_checksums_to_artifacts(context)
Example #13
def test_get_release_props_raises(context, mocker):
    context.task = get_fake_valid_task(taskjson="task_missing_relprops.json")
    with pytest.raises(ScriptWorkerTaskException):
        get_release_props(context)