Example #1
def release_promotion_action(parameters, graph_config, input, task_group_id,
                             task_id):
    release_promotion_flavor = input['release_promotion_flavor']
    promotion_config = graph_config['release-promotion']['flavors'][
        release_promotion_flavor]

    target_tasks_method = promotion_config['target-tasks-method'].format(
        project=parameters['project'])
    rebuild_kinds = input.get('rebuild_kinds') or promotion_config.get(
        'rebuild-kinds', [])
    do_not_optimize = input.get('do_not_optimize') or promotion_config.get(
        'do-not-optimize', [])

    # make parameters read-write
    parameters = dict(parameters)
    # Build previous_graph_ids from ``previous_graph_ids`` or ``revision``.
    previous_graph_ids = input.get('previous_graph_ids')
    if not previous_graph_ids:
        previous_graph_ids = [find_decision_task(parameters, graph_config)]

    # Download parameters from the first decision task
    parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
    # Download and combine full task graphs from each of the previous_graph_ids.
    # Sometimes previous relpro action tasks will add tasks, like partials,
    # that didn't exist in the first full_task_graph, so combining them is
    # important. The rightmost graph should take precedence in the case of
    # conflicts.
    combined_full_task_graph = {}
    for graph_id in previous_graph_ids:
        full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
        combined_full_task_graph.update(full_task_graph)
    _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
    parameters['existing_tasks'] = find_existing_tasks_from_previous_kinds(
        combined_full_task_graph, previous_graph_ids, rebuild_kinds)
    parameters['do_not_optimize'] = do_not_optimize
    parameters['target_tasks_method'] = target_tasks_method
    parameters['build_number'] = int(input['build_number'])
    # When doing staging releases on try, we still want to re-use tasks from
    # previous graphs.
    parameters['optimize_target_tasks'] = True
    parameters['xpi_name'] = input['xpi_name']
    # TODO
    #  - require this is a specific revision
    #  - possibly also check that this is on a reviewed PR or merged into
    #    a trusted branch. this will require an oauth token
    parameters['xpi_revision'] = input.get('revision', 'master')
    parameters['shipping_phase'] = input['release_promotion_flavor']

    # We blow away `tasks_for` when we load the on-push decision task's
    # parameters.yml. Let's set this back to `action`.
    parameters['tasks_for'] = "action"

    if input.get('version'):
        parameters['version'] = input['version']

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({'root': graph_config.root_dir}, parameters=parameters)
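
For reference, the action receives the callback parameters, the graph config, and the JSON input validated against the action's schema. A minimal invocation sketch, with hypothetical input values (the real `parameters` and `graph_config` come from the action task's context):

# Hypothetical invocation; every key below mirrors one read by the function.
example_input = {
    'release_promotion_flavor': 'ship',    # must be a flavor defined in graph_config
    'build_number': 1,
    'xpi_name': 'example-extension',       # hypothetical value
    'revision': '0123456789abcdef',        # hypothetical value
}
release_promotion_action(parameters, graph_config, example_input,
                         task_group_id='someTaskGroupId', task_id='someTaskId')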
Example #2
def geckoprofile_action(parameters, graph_config, input, task_group_id, task_id):
    task = taskcluster.get_task_definition(task_id)
    label = task['metadata']['name']
    pushes = []
    depth = 2
    end_id = int(parameters['pushlog_id'])

    while True:
        start_id = max(end_id - depth, 0)
        pushlog_url = PUSHLOG_TMPL.format(parameters['head_repository'], start_id, end_id)
        r = requests.get(pushlog_url)
        r.raise_for_status()
        pushes = pushes + list(r.json()['pushes'].keys())
        if len(pushes) >= depth:
            break

        end_id = start_id - 1
        start_id -= depth
        if start_id < 0:
            break

    pushes = sorted(pushes)[-depth:]
    backfill_pushes = []

    for push in pushes:
        try:
            full_task_graph = get_artifact_from_index(
                    INDEX_TMPL.format(parameters['project'], push),
                    'public/full-task-graph.json')
            _, full_task_graph = TaskGraph.from_json(full_task_graph)
            label_to_taskid = get_artifact_from_index(
                    INDEX_TMPL.format(parameters['project'], push),
                    'public/label-to-taskid.json')
            push_params = get_artifact_from_index(
                    INDEX_TMPL.format(parameters['project'], push),
                    'public/parameters.yml')
            push_decision_task_id = find_decision_task(push_params, graph_config)
        except HTTPError as e:
            logger.info('Skipping {} due to missing index artifacts! Error: {}'.format(push, e))
            continue

        if label in full_task_graph.tasks.keys():
            def modifier(task):
                if task.label != label:
                    return task

                cmd = task.task['payload']['command']
                task.task['payload']['command'] = add_args_to_perf_command(
                        cmd, ['--gecko-profile'])
                task.task['extra']['treeherder']['symbol'] += '-p'
                return task

            create_tasks(graph_config, [label], full_task_graph, label_to_taskid,
                         push_params, push_decision_task_id, push, modifier=modifier)
            backfill_pushes.append(push)
        else:
            logger.info('Could not find {} on {}. Skipping.'.format(label, push))
    combine_task_graph_files(backfill_pushes)
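
`add_args_to_perf_command` is imported from elsewhere; for clarity, a minimal sketch of what such a helper might look like (hypothetical implementation, assuming the payload command is a flat list of strings):

def add_args_to_perf_command(command, extra_args):
    # Hypothetical sketch: return a copy of the command with the extra
    # flags appended, leaving the original task payload untouched.
    return list(command) + list(extra_args)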
Example #3
def fetch_graph_and_labels(parameters, graph_config):
    decision_task_id = find_decision_task(parameters, graph_config)

    # First grab the graph and labels generated during the initial decision task
    full_task_graph = get_artifact(decision_task_id, "public/full-task-graph.json")
    _, full_task_graph = TaskGraph.from_json(full_task_graph)
    label_to_taskid = get_artifact(decision_task_id, "public/label-to-taskid.json")

    # fetch everything in parallel; this avoids serializing any delay in downloading
    # each artifact (such as waiting for the artifact to be mirrored locally)
    with futures.ThreadPoolExecutor(CONCURRENCY) as e:
        fetches = []

        # fetch any modifications made by action tasks and swap out new tasks
        # for old ones
        def fetch_action(task_id):
            logger.info(f"fetching label-to-taskid.json for action task {task_id}")
            try:
                run_label_to_id = get_artifact(task_id, "public/label-to-taskid.json")
                label_to_taskid.update(run_label_to_id)
            except HTTPError as e:
                if e.response.status_code != 404:
                    raise
                logger.debug(f"No label-to-taskid.json found for {task_id}: {e}")

        namespace = "{}.v2.{}.pushlog-id.{}.actions".format(
            graph_config["trust-domain"],
            parameters["project"],
            parameters["pushlog_id"],
        )
        for task_id in list_tasks(namespace):
            fetches.append(e.submit(fetch_action, task_id))

        # Similarly for cron tasks.
        def fetch_cron(task_id):
            logger.info(f"fetching label-to-taskid.json for cron task {task_id}")
            try:
                run_label_to_id = get_artifact(task_id, "public/label-to-taskid.json")
                label_to_taskid.update(run_label_to_id)
            except HTTPError as e:
                if e.response.status_code != 404:
                    raise
                logger.debug(f"No label-to-taskid.json found for {task_id}: {e}")

        namespace = "{}.v2.{}.revision.{}.cron".format(
            graph_config["trust-domain"], parameters["project"], parameters["head_rev"]
        )
        for task_id in list_tasks(namespace):
            fetches.append(e.submit(fetch_cron, task_id))

        # now wait for each fetch to complete, raising an exception if there
        # were any issues
        for f in futures.as_completed(fetches):
            f.result()

    return (decision_task_id, full_task_graph, label_to_taskid)
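
The executor block above is a reusable pattern: submit every download up front, then drain futures.as_completed so the first failure propagates to the caller. A standalone sketch built on the standard library (fetch_one is a placeholder for any artifact download):

from concurrent import futures

def fetch_all(task_ids, fetch_one, concurrency=50):
    # Run fetch_one(task_id) in parallel and surface the first exception.
    with futures.ThreadPoolExecutor(concurrency) as executor:
        pending = [executor.submit(fetch_one, task_id) for task_id in task_ids]
        for future in futures.as_completed(pending):
            future.result()  # re-raises if the underlying fetch failed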
Example #4
def release_promotion_action(parameters, graph_config, input, task_group_id,
                             task_id):
    release_promotion_flavor = input['release_promotion_flavor']
    promotion_config = graph_config['release-promotion']['flavors'][
        release_promotion_flavor]

    target_tasks_method = promotion_config['target-tasks-method'].format(
        project=parameters['project'])
    rebuild_kinds = input.get('rebuild_kinds') or promotion_config.get(
        'rebuild-kinds', [])
    do_not_optimize = input.get('do_not_optimize') or promotion_config.get(
        'do-not-optimize', [])

    # make parameters read-write
    parameters = dict(parameters)
    # Build previous_graph_ids from ``previous_graph_ids`` or ``revision``.
    previous_graph_ids = input.get('previous_graph_ids')
    if not previous_graph_ids:
        previous_graph_ids = [find_decision_task(parameters, graph_config)]

    # Download parameters from the first decision task
    parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
    # Download and combine full task graphs from each of the previous_graph_ids.
    # Sometimes previous relpro action tasks will add tasks, like partials,
    # that didn't exist in the first full_task_graph, so combining them is
    # important. The rightmost graph should take precedence in the case of
    # conflicts.
    combined_full_task_graph = {}
    for graph_id in previous_graph_ids:
        full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
        combined_full_task_graph.update(full_task_graph)
    _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
    parameters['existing_tasks'] = find_existing_tasks_from_previous_kinds(
        combined_full_task_graph, previous_graph_ids, rebuild_kinds)
    parameters['do_not_optimize'] = do_not_optimize
    parameters['target_tasks_method'] = target_tasks_method
    parameters['build_number'] = int(input['build_number'])
    # When doing staging releases on try, we still want to re-use tasks from
    # previous graphs.
    parameters['optimize_target_tasks'] = True
    parameters['shipping_phase'] = input['release_promotion_flavor']

    version_in_file = read_version_file()
    parameters['version'] = input['version'] if input.get(
        'version') else version_in_file
    version_string = parameters['version']
    if version_string != version_in_file:
        raise ValueError(
            "Version given in tag ({}) does not match the one in version.txt ({})"
            .format(version_string, version_in_file))
    parameters['head_tag'] = 'v{}'.format(version_string)

    parameters['next_version'] = input['next_version']

    parameters['release_type'] = "release"

    parameters['pull_request_number'] = None
    parameters['tasks_for'] = 'action'

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({'root': graph_config.root_dir}, parameters=parameters)
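
`read_version_file` is not shown in this example; judging by the error message it returns the contents of an in-tree version.txt. A plausible sketch (the path is an assumption):

import os

def read_version_file():
    # Hypothetical sketch: assume version.txt sits at the repository root,
    # relative to this module.
    path = os.path.join(os.path.dirname(__file__), '..', 'version.txt')
    with open(path) as f:
        return f.read().strip()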
Example #5
def backfill_action(parameters, graph_config, input, task_group_id, task_id):
    task = taskcluster.get_task_definition(task_id)
    label = task['metadata']['name']
    pushes = []
    inclusive_tweak = 1 if input.get('inclusive') else 0
    depth = input.get('depth', 9) + inclusive_tweak
    end_id = int(parameters['pushlog_id']) - (1 - inclusive_tweak)

    while True:
        start_id = max(end_id - depth, 0)
        pushlog_url = PUSHLOG_TMPL.format(parameters['head_repository'],
                                          start_id, end_id)
        r = requests.get(pushlog_url)
        r.raise_for_status()
        pushes = pushes + list(r.json()['pushes'].keys())
        if len(pushes) >= depth:
            break

        end_id = start_id - 1
        start_id -= depth
        if start_id < 0:
            break

    pushes = sorted(pushes)[-depth:]
    backfill_pushes = []

    for push in pushes:
        try:
            full_task_graph = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/full-task-graph.json')
            _, full_task_graph = TaskGraph.from_json(full_task_graph)
            label_to_taskid = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/label-to-taskid.json')
            push_params = get_artifact_from_index(
                INDEX_TMPL.format(parameters['project'], push),
                'public/parameters.yml')
            push_decision_task_id = find_decision_task(push_params,
                                                       graph_config)
        except HTTPError as e:
            logger.info(
                'Skipping {} due to missing index artifacts! Error: {}'.format(
                    push, e))
            continue

        if label in full_task_graph.tasks.keys():

            def modifier(task):
                if task.label != label:
                    return task

                if input.get('testPath', ''):
                    is_wpttest = 'web-platform' in task.task['metadata'][
                        'name']
                    is_android = 'android' in task.task['metadata']['name']
                    gpu_required = False
                    if (not is_wpttest) and \
                       ('gpu' in task.task['metadata']['name'] or
                        'webgl' in task.task['metadata']['name'] or
                        ('reftest' in task.task['metadata']['name'] and
                         'jsreftest' not in task.task['metadata']['name'])):
                        gpu_required = True

                    # Create new cmd that runs a test-verify type job
                    preamble_length = 3
                    verify_args = [
                        '--e10s', '--verify', '--total-chunk=1',
                        '--this-chunk=1'
                    ]
                    if is_android:
                        # no --e10s; todo, what about future geckoView?
                        verify_args.remove('--e10s')

                    if gpu_required:
                        verify_args.append('--gpu-required')

                    if 'testPath' in input:
                        task.task['payload']['env'][
                            'MOZHARNESS_TEST_PATHS'] = six.ensure_text(
                                json.dumps(
                                    {
                                        task.task['extra']['suite']['flavor']:
                                        [input['testPath']]
                                    },
                                    sort_keys=True))

                    cmd_parts = task.task['payload']['command']
                    keep_args = [
                        '--installer-url', '--download-symbols',
                        '--test-packages-url'
                    ]
                    cmd_parts = remove_args_from_command(
                        cmd_parts, preamble_length, keep_args)
                    cmd_parts = add_args_to_command(cmd_parts, verify_args)
                    task.task['payload']['command'] = cmd_parts

                    # morph the task label to a test-verify job
                    pc = task.task['metadata']['name'].split('/')
                    config = pc[-1].split('-')
                    subtype = ''
                    symbol = 'TV-bf'
                    if gpu_required:
                        subtype = '-gpu'
                        symbol = 'TVg-bf'
                    if is_wpttest:
                        subtype = '-wpt'
                        symbol = 'TVw-bf'
                    if not is_android:
                        subtype = "%s-e10s" % subtype
                    newlabel = "%s/%s-test-verify%s" % (pc[0], config[0],
                                                        subtype)
                    task.task['metadata']['name'] = newlabel
                    task.task['tags']['label'] = newlabel

                    task.task['extra']['index']['rank'] = 0
                    task.task['extra']['chunks']['current'] = 1
                    task.task['extra']['chunks']['total'] = 1

                    task.task['extra']['suite']['name'] = 'test-verify'
                    task.task['extra']['suite']['flavor'] = 'test-verify'

                    task.task['extra']['treeherder']['symbol'] = symbol
                    del task.task['extra']['treeherder']['groupSymbol']
                return task

            times = input.get('times', 1)
            for i in range(times):
                create_tasks(graph_config, [label],
                             full_task_graph,
                             label_to_taskid,
                             push_params,
                             push_decision_task_id,
                             push,
                             modifier=modifier)
            backfill_pushes.append(push)
        else:
        logger.info('Could not find {} on {}. Skipping.'.format(
            label, push))
    combine_task_graph_files(backfill_pushes)
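
`remove_args_from_command` and `add_args_to_command` are imported helpers; a rough sketch of the behavior the modifier relies on (hypothetical implementations): keep the first `preamble_length` entries plus any flag listed in `keep_args`, then append the new verify flags.

def remove_args_from_command(command, preamble_length, keep_args):
    # Hypothetical sketch: preserve the interpreter/script preamble and any
    # argument whose flag appears in keep_args; drop everything else.
    kept = list(command[:preamble_length])
    for arg in command[preamble_length:]:
        if any(arg.startswith(flag) for flag in keep_args):
            kept.append(arg)
    return kept

def add_args_to_command(command, extra_args):
    # Hypothetical sketch: append the new arguments to the trimmed command.
    return list(command) + list(extra_args)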
Example #6
def release_promotion_action(parameters, graph_config, input, task_group_id, task_id):
    release_promotion_flavor = input["release_promotion_flavor"]
    promotion_config = graph_config["release-promotion"]["flavors"][
        release_promotion_flavor
    ]

    target_tasks_method = promotion_config["target-tasks-method"].format(
        project=parameters["project"]
    )
    rebuild_kinds = input.get("rebuild_kinds") or promotion_config.get(
        "rebuild-kinds", []
    )
    do_not_optimize = input.get("do_not_optimize") or promotion_config.get(
        "do-not-optimize", []
    )

    # make parameters read-write
    parameters = dict(parameters)
    # Build previous_graph_ids from ``previous_graph_ids`` or ``revision``.
    previous_graph_ids = input.get("previous_graph_ids")
    if not previous_graph_ids:
        previous_graph_ids = [find_decision_task(parameters, graph_config)]

    # Download parameters from the first decision task
    parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
    # Download and combine full task graphs from each of the previous_graph_ids.
    # Sometimes previous relpro action tasks will add tasks, like partials,
    # that didn't exist in the first full_task_graph, so combining them is
    # important. The rightmost graph should take precedence in the case of
    # conflicts.
    combined_full_task_graph = {}
    for graph_id in previous_graph_ids:
        full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
        combined_full_task_graph.update(full_task_graph)
    _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
    parameters["existing_tasks"] = find_existing_tasks_from_previous_kinds(
        combined_full_task_graph, previous_graph_ids, rebuild_kinds
    )
    parameters["do_not_optimize"] = do_not_optimize
    parameters["target_tasks_method"] = target_tasks_method
    parameters["build_number"] = int(input["build_number"])
    # When doing staging releases on try, we still want to re-use tasks from
    # previous graphs.
    parameters["optimize_target_tasks"] = True
    parameters["shipping_phase"] = input["release_promotion_flavor"]

    version_in_file = read_version_file()
    parameters["version"] = (
        input["version"] if input.get("version") else read_version_file()
    )
    version_string = parameters["version"]
    if version_string != version_in_file:
        raise ValueError(
            "Version given in tag ({}) does not match the one in version.txt ({})".format(
                version_string, version_in_file
            )
        )
    parameters["head_tag"] = "v{}".format(version_string)

    parameters["next_version"] = input["next_version"]

    version = FenixVersion.parse(version_string)
    if version.is_beta:
        release_type = "beta"
    elif version.is_release:
        release_type = "release"
    elif version.is_release_candidate:
        release_type = "release"
    else:
        raise ValueError("Unsupported version type: {}".format(version.version_type))
    parameters["release_type"] = release_type
    parameters["tasks_for"] = "action"

    parameters["pull_request_number"] = None

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({"root": graph_config.root_dir}, parameters=parameters)
Example #7
def release_promotion_action(parameters, graph_config, input, task_group_id,
                             task_id):
    release_promotion_flavor = input['release_promotion_flavor']
    promotion_config = graph_config['release-promotion']['flavors'][
        release_promotion_flavor]
    release_history = {}
    product = promotion_config['product']

    next_version = str(input.get('next_version') or '')
    if promotion_config.get('version-bump', False):
        # We force str() the input, hence the 'None'
        if next_version in ['', 'None']:
            raise Exception(
                "`next_version` property needs to be provided for `{}` "
                "target.".format(release_promotion_flavor))

    if promotion_config.get('partial-updates', False):
        partial_updates = input.get('partial_updates', {})
        if not partial_updates and release_level(
                parameters['project']) == 'production':
            raise Exception(
                "`partial_updates` property needs to be provided for `{}`"
                "target.".format(release_promotion_flavor))
        balrog_prefix = product.title()
        os.environ['PARTIAL_UPDATES'] = json.dumps(partial_updates)
        release_history = populate_release_history(
            balrog_prefix,
            parameters['project'],
            partial_updates=partial_updates)

    target_tasks_method = promotion_config['target-tasks-method'].format(
        project=parameters['project'])
    rebuild_kinds = input.get('rebuild_kinds',
                              promotion_config.get('rebuild-kinds', []))
    do_not_optimize = input.get('do_not_optimize',
                                promotion_config.get('do-not-optimize', []))

    # make parameters read-write
    parameters = dict(parameters)
    # Build previous_graph_ids from ``previous_graph_ids``, ``pushlog_id``,
    # or ``revision``.
    previous_graph_ids = input.get('previous_graph_ids')
    if not previous_graph_ids:
        revision = input.get('revision')
        if not parameters['pushlog_id']:
            repo_param = '{}head_repository'.format(
                graph_config['project-repo-param-prefix'])
            push_info = find_hg_revision_push_info(
                repository=parameters[repo_param], revision=revision)
            parameters['pushlog_id'] = push_info['pushid']
        previous_graph_ids = [find_decision_task(parameters, graph_config)]

    # Download parameters from the first decision task
    parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
    # Download and combine full task graphs from each of the previous_graph_ids.
    # Sometimes previous relpro action tasks will add tasks, like partials,
    # that didn't exist in the first full_task_graph, so combining them is
    # important. The rightmost graph should take precedence in the case of
    # conflicts.
    combined_full_task_graph = {}
    for graph_id in previous_graph_ids:
        full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
        combined_full_task_graph.update(full_task_graph)
    _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
    parameters['existing_tasks'] = find_existing_tasks_from_previous_kinds(
        combined_full_task_graph, previous_graph_ids, rebuild_kinds)
    parameters['do_not_optimize'] = do_not_optimize
    parameters['target_tasks_method'] = target_tasks_method
    parameters['build_number'] = int(input['build_number'])
    parameters['next_version'] = next_version
    parameters['release_history'] = release_history
    if promotion_config.get('is-rc'):
        parameters['release_type'] += '-rc'
    parameters['release_eta'] = input.get('release_eta', '')
    parameters['release_product'] = product
    # When doing staging releases on try, we still want to re-use tasks from
    # previous graphs.
    parameters['optimize_target_tasks'] = True

    # Partner/EMEfree are enabled by default when get_partner_url_config() returns a non-null url.
    # The action input may override by sending False. It's an error to send True with no url found.
    partner_url_config = get_partner_url_config(parameters, graph_config)
    release_enable_partners = partner_url_config[
        'release-partner-repack'] is not None
    release_enable_emefree = partner_url_config[
        'release-eme-free-repack'] is not None
    if input.get('release_enable_partners') is False:
        release_enable_partners = False
    elif input.get(
            'release_enable_partners') is True and not release_enable_partners:
        raise Exception(
            "Can't enable partner repacks when no config url found")
    if input.get('release_enable_emefree') is False:
        release_enable_emefree = False
    elif input.get(
            'release_enable_emefree') is True and not release_enable_emefree:
        raise Exception("Can't enable EMEfree when no config url found")
    parameters['release_enable_partners'] = release_enable_partners
    parameters['release_enable_emefree'] = release_enable_emefree

    partner_config = input.get('release_partner_config')
    if not partner_config and (release_enable_emefree
                               or release_enable_partners):
        github_token = get_token(parameters)
        partner_config = get_partner_config(partner_url_config, github_token)
    if partner_config:
        parameters['release_partner_config'] = fix_partner_config(
            partner_config)
    parameters['release_partners'] = input.get('release_partners')
    if input.get('release_partner_build_number'):
        parameters['release_partner_build_number'] = input[
            'release_partner_build_number']

    if input['version']:
        parameters['version'] = input['version']

    parameters['required_signoffs'] = get_required_signoffs(input, parameters)
    parameters['signoff_urls'] = get_signoff_urls(input, parameters)

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({'root': graph_config.root_dir}, parameters=parameters)
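
The partner/EMEfree toggles above follow a tri-state pattern: default to whatever the url config makes possible, let the input force the feature off, and treat an explicit True with no url as an error. Condensed into one hypothetical helper:

def resolve_toggle(requested, url_available, feature):
    # requested is True, False, or None (not supplied in the action input).
    if requested is False:
        return False
    if requested is True and not url_available:
        raise Exception("Can't enable {} when no config url found".format(feature))
    return url_available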
Example #8
def release_promotion_action(parameters, graph_config, input, task_group_id,
                             task_id):
    release_promotion_flavor = input["release_promotion_flavor"]
    promotion_config = graph_config["release-promotion"]["flavors"][
        release_promotion_flavor]
    release_history = {}
    product = promotion_config["product"]

    next_version = str(input.get("next_version") or "")
    if promotion_config.get("version-bump", False):
        # We force str() the input, hence the 'None'
        if next_version in ["", "None"]:
            raise Exception(
                "`next_version` property needs to be provided for `{}` "
                "target.".format(release_promotion_flavor))

    if promotion_config.get("partial-updates", False):
        partial_updates = input.get("partial_updates", {})
        if not partial_updates and release_level(
                parameters["project"]) == "production":
            raise Exception(
                "`partial_updates` property needs to be provided for `{}`"
                "target.".format(release_promotion_flavor))
        balrog_prefix = product.title()
        os.environ["PARTIAL_UPDATES"] = json.dumps(partial_updates,
                                                   sort_keys=True)
        release_history = populate_release_history(
            balrog_prefix,
            parameters["project"],
            partial_updates=partial_updates)

    target_tasks_method = promotion_config["target-tasks-method"].format(
        project=parameters["project"])
    rebuild_kinds = input.get("rebuild_kinds",
                              promotion_config.get("rebuild-kinds", []))
    do_not_optimize = input.get("do_not_optimize",
                                promotion_config.get("do-not-optimize", []))

    # Build previous_graph_ids from ``previous_graph_ids``, ``revision``,
    # or the action parameters.
    previous_graph_ids = input.get("previous_graph_ids")
    if not previous_graph_ids:
        revision = input.get("revision")
        if revision:
            head_rev_param = "{}head_rev".format(
                graph_config["project-repo-param-prefix"])
            push_parameters = {
                head_rev_param: revision,
                "project": parameters["project"],
            }
        else:
            push_parameters = parameters
        previous_graph_ids = [
            find_decision_task(push_parameters, graph_config)
        ]

    # Download parameters from the first decision task
    parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
    # Download and combine full task graphs from each of the previous_graph_ids.
    # Sometimes previous relpro action tasks will add tasks, like partials,
    # that didn't exist in the first full_task_graph, so combining them is
    # important. The rightmost graph should take precedence in the case of
    # conflicts.
    combined_full_task_graph = {}
    for graph_id in previous_graph_ids:
        full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
        combined_full_task_graph.update(full_task_graph)
    _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
    parameters["existing_tasks"] = find_existing_tasks_from_previous_kinds(
        combined_full_task_graph, previous_graph_ids, rebuild_kinds)
    parameters["do_not_optimize"] = do_not_optimize
    parameters["target_tasks_method"] = target_tasks_method
    parameters["build_number"] = int(input["build_number"])
    parameters["next_version"] = next_version
    parameters["release_history"] = release_history
    if promotion_config.get("is-rc"):
        parameters["release_type"] += "-rc"
    parameters["release_eta"] = input.get("release_eta", "")
    parameters["release_product"] = product
    # When doing staging releases on try, we still want to re-use tasks from
    # previous graphs.
    parameters["optimize_target_tasks"] = True

    if release_promotion_flavor == "promote_firefox_partner_repack":
        release_enable_partner_repack = True
        release_enable_partner_attribution = False
        release_enable_emefree = False
    elif release_promotion_flavor == "promote_firefox_partner_attribution":
        release_enable_partner_repack = False
        release_enable_partner_attribution = True
        release_enable_emefree = False
    else:
        # for promotion or ship phases, we use the action input to turn the repacks/attribution off
        release_enable_partner_repack = input.get(
            "release_enable_partner_repack", True)
        release_enable_partner_attribution = input.get(
            "release_enable_partner_attribution", True)
        release_enable_emefree = input.get("release_enable_emefree", True)

    partner_url_config = get_partner_url_config(parameters, graph_config)
    if (release_enable_partner_repack
            and not partner_url_config["release-partner-repack"]):
        raise Exception(
            "Can't enable partner repacks when no config url found")
    if (release_enable_partner_attribution
            and not partner_url_config["release-partner-attribution"]):
        raise Exception(
            "Can't enable partner attribution when no config url found")
    if release_enable_emefree and not partner_url_config[
            "release-eme-free-repack"]:
        raise Exception(
            "Can't enable EMEfree repacks when no config url found")
    parameters["release_enable_partner_repack"] = release_enable_partner_repack
    parameters[
        "release_enable_partner_attribution"] = release_enable_partner_attribution
    parameters["release_enable_emefree"] = release_enable_emefree

    partner_config = input.get("release_partner_config")
    if not partner_config and any([
            release_enable_partner_repack,
            release_enable_partner_attribution,
            release_enable_emefree,
    ]):
        github_token = get_token(parameters)
        partner_config = get_partner_config(partner_url_config, github_token)
    if partner_config:
        parameters["release_partner_config"] = fix_partner_config(
            partner_config)
    parameters["release_partners"] = input.get("release_partners")
    if input.get("release_partner_build_number"):
        parameters["release_partner_build_number"] = input[
            "release_partner_build_number"]

    if input["version"]:
        parameters["version"] = input["version"]

    parameters["required_signoffs"] = get_required_signoffs(input, parameters)
    parameters["signoff_urls"] = get_signoff_urls(input, parameters)

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({"root": graph_config.root_dir}, parameters=parameters)