Beispiel #1
0
def trigger_action_callback(task_group_id,
                            task_id,
                            input,
                            callback,
                            parameters,
                            root,
                            test=False):
    """
    Run the action callback named by `callback` with the given inputs.

    When `test` is true the callback executes in testing mode and no real
    tasks are created; the task-scope sanity check is skipped as well.
    """
    graph_config = load_graph_config(root)
    callbacks = _get_callbacks(graph_config)
    cb = callbacks.get(callback)
    if not cb:
        raise Exception('Unknown callback: {}. Known callbacks: {}'.format(
            callback, ', '.join(callbacks)))

    if test:
        create.testing = True
        taskcluster.testing = True
    else:
        sanity_check_task_scope(callback, parameters, graph_config)

    # fetch the target task, if taskId was given
    # FIXME: many actions don't need this, so move this fetch into the callbacks
    # that do need it
    task = taskcluster.get_task_definition(task_id) if task_id else None

    cb(Parameters(**parameters), graph_config, input, task_group_id, task_id,
       task)
Beispiel #2
0
 def test_Parameters_check_extra(self):
     """
     Supplying a parameter outside the global and comm sets must make
     `check` raise.
     """
     params = Parameters(extra="data", **self.vals)
     self.assertRaises(Exception, params.check)
Beispiel #3
0
    def test_Parameters_immutable(self):
        """Item assignment on a Parameters instance must raise."""
        params = Parameters(**self.vals)

        self.assertRaises(
            Exception, lambda: params.__setitem__('head_ref', 20))
Beispiel #4
0
def merge_automation_action(parameters, graph_config, input, task_group_id, task_id):
    """Build merge-automation parameters from the action input and trigger
    a taskgraph decision with them."""
    # Work on a plain dict copy so the parameters can be modified.
    overrides = dict(parameters)

    overrides["target_tasks_method"] = "merge_automation"
    merge_config = {
        "force-dry-run": input.get("force-dry-run", False),
        "behavior": input["behavior"],
    }

    # Copy over only the optional fields that were actually provided.
    optional_fields = (
        "from-repo",
        "from-branch",
        "to-repo",
        "to-branch",
        "ssh-user-alias",
        "push",
        "fetch-version-from",
    )
    for field in optional_fields:
        value = input.get(field)
        if value:
            merge_config[field] = value
    overrides["merge_config"] = merge_config
    overrides["tasks_for"] = "action"

    # Freeze back into read-only Parameters before handing off.
    taskgraph_decision({"root": graph_config.root_dir},
                       parameters=Parameters(**overrides))
Beispiel #5
0
def target_tasks_try_select_uncommon(full_task_graph, parameters,
                                     graph_config):
    """Union the per-project target tasks for autoland and mozilla-central
    (or just the current project when it is neither) and return them sorted."""
    from taskgraph.decision import PER_PROJECT_PARAMETERS

    projects = ("autoland", "mozilla-central")
    if parameters["project"] not in projects:
        projects = (parameters["project"],)

    tasks = set()
    for project in projects:
        project_params = dict(parameters)
        project_params["project"] = project
        parameters = Parameters(**project_params)

        # Fall back to "default" when the project has no configured method.
        try:
            target_tasks_method = PER_PROJECT_PARAMETERS[project][
                "target_tasks_method"]
        except KeyError:
            target_tasks_method = "default"

        method = get_method(target_tasks_method)
        tasks.update(method(full_task_graph, parameters, graph_config))

    return sorted(tasks)
Beispiel #6
0
def target_tasks_try_auto(full_task_graph, parameters, graph_config):
    """Target the tasks which have indicated they should be run on autoland
    (rather than try) via the `run_on_projects` attributes.

    Should do the same thing as the `default` target tasks method.
    """
    overrides = dict(parameters)
    overrides["project"] = "autoland"
    parameters = Parameters(**overrides)

    regex_filters = parameters["try_task_config"].get("tasks-regex")
    include_regexes = []
    exclude_regexes = []
    if regex_filters:
        include_regexes = [re.compile(r)
                           for r in regex_filters.get("include", [])]
        exclude_regexes = [re.compile(r)
                           for r in regex_filters.get("exclude", [])]

    def _wanted(task):
        # All filters must agree before a task is targeted.
        return (standard_filter(task, parameters)
                and filter_out_shipping_phase(task, parameters)
                and filter_out_devedition(task, parameters)
                and filter_by_uncommon_try_tasks(task.label)
                and filter_by_regex(task.label, include_regexes, mode="include")
                and filter_by_regex(task.label, exclude_regexes, mode="exclude")
                and filter_unsupported_artifact_builds(task, parameters))

    return [label for label, task in six.iteritems(full_task_graph.tasks)
            if _wanted(task)]
def trigger_action_callback(task_group_id,
                            task_id,
                            task,
                            input,
                            callback,
                            parameters,
                            root,
                            test=False):
    """
    Trigger action callback with the given inputs. If `test` is true, then run
    the action callback in testing mode, without actually creating tasks.

    Raises Exception when `callback` is not a registered callback name.
    """
    graph_config = load_graph_config(root)
    callbacks = _get_callbacks(graph_config)
    cb = callbacks.get(callback, None)
    if not cb:
        # Join the known callback names so the error shows a readable list
        # instead of the dict's repr (matches the other variant of this
        # function in this file).
        raise Exception('Unknown callback: {}. Known callbacks: {}'.format(
            callback, ', '.join(callbacks)))

    if test:
        # Testing mode: downstream helpers stub out real task creation.
        create.testing = True
        taskcluster.testing = True

    cb(Parameters(**parameters), graph_config, input, task_group_id, task_id,
       task)
Beispiel #8
0
def release_promotion_action(parameters, graph_config, input, task_group_id,
                             task_id):
    """Action callback: assemble release-promotion parameters for an XPI
    release and trigger a taskgraph decision with them.

    Reuses tasks from previous decision/action graphs (given explicitly via
    ``input['previous_graph_ids']`` or discovered from the current revision)
    and freezes the resulting parameters before handing them to
    ``taskgraph_decision``.
    """
    release_promotion_flavor = input['release_promotion_flavor']
    promotion_config = graph_config['release-promotion']['flavors'][
        release_promotion_flavor]

    target_tasks_method = promotion_config['target-tasks-method'].format(
        project=parameters['project'])
    # Action input takes precedence over the flavor's configured values.
    rebuild_kinds = input.get('rebuild_kinds') or promotion_config.get(
        'rebuild-kinds', [])
    do_not_optimize = input.get('do_not_optimize') or promotion_config.get(
        'do-not-optimize', [])

    # make parameters read-write
    parameters = dict(parameters)
    # Build previous_graph_ids from ``previous_graph_ids`` or ``revision``.
    previous_graph_ids = input.get('previous_graph_ids')
    if not previous_graph_ids:
        previous_graph_ids = [find_decision_task(parameters, graph_config)]

    # Download parameters from the first decision task
    # NOTE: this replaces the local `parameters` dict built above entirely;
    # only the artifact's contents (plus the overrides below) survive.
    parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
    # Download and combine full task graphs from each of the previous_graph_ids.
    # Sometimes previous relpro action tasks will add tasks, like partials,
    # that didn't exist in the first full_task_graph, so combining them is
    # important. The rightmost graph should take precedence in the case of
    # conflicts.
    combined_full_task_graph = {}
    for graph_id in previous_graph_ids:
        full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
        combined_full_task_graph.update(full_task_graph)
    _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
    parameters['existing_tasks'] = find_existing_tasks_from_previous_kinds(
        combined_full_task_graph, previous_graph_ids, rebuild_kinds)
    parameters['do_not_optimize'] = do_not_optimize
    parameters['target_tasks_method'] = target_tasks_method
    parameters['build_number'] = int(input['build_number'])
    # When doing staging releases on try, we still want to re-use tasks from
    # previous graphs.
    parameters['optimize_target_tasks'] = True
    parameters['xpi_name'] = input['xpi_name']
    # TODO
    #  - require this is a specific revision
    #  - possibly also check that this is on a reviewed PR or merged into
    #    a trusted branch. this will require an oauth token
    parameters['xpi_revision'] = input.get('revision', 'master')
    parameters['shipping_phase'] = input['release_promotion_flavor']

    # We blow away `tasks_for` when we load the on-push decision task's
    # parameters.yml. Let's set this back to `action`.
    parameters['tasks_for'] = "action"

    if input.get('version'):
        parameters['version'] = input['version']

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({'root': graph_config.root_dir}, parameters=parameters)
 def test_Parameters_check_missing(self):
     """
     Once any comm parameter is present, all of them are required;
     dropping one must make `check` raise.
     """
     vals = dict(self.vals)
     missing_key = next(iter(COMM_PARAMETERS.keys()))
     del vals[missing_key]
     params = Parameters(**vals)
     self.assertRaises(Exception, params.check)
Beispiel #10
0
 def test_Parameters_check_missing(self):
     """
     Omitting `comm_base_repository` while the other comm parameters are
     present must make `check` raise.
     """
     vals = self.vals.copy()
     vals.pop('comm_base_repository')
     self.assertRaises(Exception, Parameters(**vals).check)
Beispiel #11
0
 def logfile(spec):
     """Determine logfile given a parameters specification."""
     # No log directory configured means no per-spec logfile.
     if logdir is None:
         return None
     filename = "{}_{}.log".format(options["graph_attr"],
                                   Parameters.format_spec(spec))
     return os.path.join(logdir, filename)
Beispiel #12
0
def get_image_digest(image_name):
    """Return the cached-task digest recorded for the named docker image."""
    from taskgraph.generator import load_tasks_for_kind
    from taskgraph.parameters import Parameters

    params = Parameters(
        strict=False,
        level=os.environ.get("MOZ_SCM_LEVEL", "3"),
    )
    docker_image_tasks = load_tasks_for_kind(params, "docker-image")
    image_task = docker_image_tasks[f"build-docker-image-{image_name}"]
    return image_task.attributes["cached_task"]["digest"]
Beispiel #13
0
def target_tasks_try_auto(full_task_graph, parameters, graph_config):
    """Target the tasks which have indicated they should be run on autoland
    (rather than try) via the `run_on_projects` attributes.

    Should do the same thing as the `default` target tasks method.
    """
    overrides = dict(parameters)
    overrides['project'] = 'autoland'
    parameters = Parameters(**overrides)

    labels = []
    for label, task in full_task_graph.tasks.iteritems():
        if standard_filter(task, parameters) and filter_out_nightly(
                task, parameters):
            labels.append(label)
    return labels
Beispiel #14
0
def target_tasks_try_select(full_task_graph, parameters, graph_config):
    """Collect the standard target tasks for autoland and mozilla-central,
    then keep only the uncommon try tasks among them."""
    tasks = set()
    for project in ("autoland", "mozilla-central"):
        overrides = dict(parameters)
        overrides["project"] = project
        parameters = Parameters(**overrides)
        for label, task in six.iteritems(full_task_graph.tasks):
            if (standard_filter(task, parameters)
                    and filter_out_shipping_phase(task, parameters)
                    and filter_out_devedition(task, parameters)):
                tasks.add(label)

    return [label for label in tasks if filter_by_uncommon_try_tasks(label)]
def target_tasks_try_auto(full_task_graph, parameters, graph_config):
    """Target the tasks which have indicated they should be run on autoland
    (rather than try) via the `run_on_projects` attributes.

    Should do the same thing as the `default` target tasks method.
    """
    overrides = dict(parameters)
    overrides['project'] = 'autoland'
    parameters = Parameters(**overrides)

    def _selected(task):
        return (standard_filter(task, parameters)
                and filter_out_shipping_phase(task, parameters)
                and filter_out_devedition(task, parameters)
                and filter_by_uncommon_try_tasks(task.label))

    return [label for label, task in six.iteritems(full_task_graph.tasks)
            if _selected(task)]
Beispiel #16
0
def trigger_action_callback():
    """
    Trigger the action callback selected by the ACTION_* environment
    variables.

    Reads the task-group id, task id, task definition, callback name, input
    and parameters from the environment (JSON-encoded where applicable) and
    invokes the matching entry from the module-level `callbacks` registry.

    Raises Exception when ACTION_CALLBACK names no registered callback.
    """
    # `callbacks` is only read here, never assigned, so the original
    # `global callbacks` declaration was unnecessary and has been removed.
    task_group_id = os.environ.get('ACTION_TASK_GROUP_ID', None)
    task_id = json.loads(os.environ.get('ACTION_TASK_ID', 'null'))
    task = json.loads(os.environ.get('ACTION_TASK', 'null'))
    input = json.loads(os.environ.get('ACTION_INPUT', 'null'))
    callback = os.environ.get('ACTION_CALLBACK', None)
    parameters = json.loads(os.environ.get('ACTION_PARAMETERS', 'null'))
    cb = callbacks.get(callback, None)
    if not cb:
        raise Exception('Unknown callback: {}'.format(callback))
    cb(Parameters(**parameters), input, task_group_id, task_id, task)
Beispiel #17
0
def trigger_action_callback(task_group_id, task_id, task, input, callback, parameters,
                            test=False):
    """
    Run the named action callback with the given inputs. When `test` is
    true the callback executes in testing mode and no real tasks are
    created.
    """
    registered = get_callbacks()
    cb = registered.get(callback)
    if not cb:
        raise Exception('Unknown callback: {}. Known callbacks: {}'.format(
            callback, get_callbacks().keys()))

    if test:
        create.testing = True
        taskcluster.testing = True

    cb(Parameters(**parameters), input, task_group_id, task_id, task)
Beispiel #18
0
def release_promotion_action(parameters, input, task_group_id, task_id, task):
    """Action callback: assemble release-promotion parameters for the given
    flavor and trigger a taskgraph decision with them.

    Reuses tasks from a previous decision graph (given via
    ``input['previous_graph_ids']`` or discovered from the revision) and
    freezes the resulting parameters before calling ``taskgraph_decision``.

    Raises Exception when a version-bump flavor is requested without a
    usable ``next_version``.
    """
    release_promotion_flavor = input['release_promotion_flavor']
    next_version = str(input.get('next_version') or '')
    if release_promotion_flavor in VERSION_BUMP_FLAVORS:
        # We force str() the input, hence the 'None'
        if next_version in ['', 'None']:
            raise Exception(
                "`next_version` property needs to be provided for %s "
                "targets." % ', '.join(VERSION_BUMP_FLAVORS))
    promotion_config = RELEASE_PROMOTION_CONFIG[release_promotion_flavor]

    # Action input overrides the flavor's configured values.
    target_tasks_method = input.get(
        'target_tasks_method', promotion_config['target_tasks_method'].format(
            project=parameters['project']))
    previous_graph_kinds = input.get('previous_graph_kinds',
                                     promotion_config['previous_graph_kinds'])
    do_not_optimize = input.get('do_not_optimize',
                                promotion_config['do_not_optimize'])

    # make parameters read-write
    parameters = dict(parameters)
    # Build previous_graph_ids from ``previous_graph_ids``, ``pushlog_id``,
    # or ``revision``.
    previous_graph_ids = input.get('previous_graph_ids')
    if not previous_graph_ids:
        revision = input.get('revision')
        parameters['pushlog_id'] = parameters['pushlog_id'] or \
            find_hg_revision_pushlog_id(parameters, revision)
        previous_graph_ids = [find_decision_task(parameters)]

    # Download parameters and full task graph from the first decision task.
    # NOTE: this replaces the local `parameters` dict built above entirely;
    # only the artifact's contents (plus the overrides below) survive.
    parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
    full_task_graph = get_artifact(previous_graph_ids[0],
                                   "public/full-task-graph.json")
    _, full_task_graph = TaskGraph.from_json(full_task_graph)
    parameters['existing_tasks'] = find_existing_tasks_from_previous_kinds(
        full_task_graph, previous_graph_ids, previous_graph_kinds)
    parameters['do_not_optimize'] = do_not_optimize
    parameters['target_tasks_method'] = target_tasks_method
    parameters['build_number'] = str(input['build_number'])
    parameters['next_version'] = next_version

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({}, parameters=parameters)
def scriptworker_canary(parameters, graph_config, input, task_group_id,
                        task_id):
    """Trigger a decision task that targets scriptworker canary tasks for
    the workers named in the action input."""
    scriptworkers = input["scriptworkers"]

    # Work on a plain dict copy so the parameters can be modified.
    overrides = dict(parameters)
    overrides["target_tasks_method"] = "scriptworker_canary"
    overrides["try_task_config"] = {
        "scriptworker-canary-workers": scriptworkers,
    }
    overrides["tasks_for"] = "action"

    # Freeze back into read-only Parameters before handing off.
    taskgraph_decision({"root": graph_config.root_dir},
                       parameters=Parameters(**overrides))
Beispiel #20
0
def load_image_by_name(image_name, tag=None):
    """Load the docker image produced by the `build-docker-image-<name>`
    task, returning False when no prebuilt artifact can be located."""
    params = Parameters(
        strict=False,
        level=os.environ.get('MOZ_SCM_LEVEL', '3'),
    )
    image_task = load_tasks_for_kind(params, 'docker-image')[
        'build-docker-image-{}'.format(image_name)]
    task_id = IndexSearch().should_replace_task(
        image_task, {}, image_task.optimization.get('index-search', []))

    # A bare True/False means no concrete replacement task was found.
    if task_id in (True, False):
        print('Could not find artifacts for a docker image '
              'named `{image_name}`. Local commits and other changes '
              'in your checkout may cause this error. Try '
              'updating to a fresh checkout of mozilla-central '
              'to download image.'.format(image_name=image_name))
        return False

    return load_image_by_task_id(task_id, tag)
Beispiel #21
0
def dump_output(out, path=None, params_spec=None):
    """Write `out` to `path` (with the parameter-spec name substituted into
    the filename) or, when no path is given, to stdout preceded by a header
    on stderr.

    Raises OSError if the output file cannot be opened.
    """
    from taskgraph.parameters import Parameters

    params_name = Parameters.format_spec(params_spec)
    if path:
        # Substitute params name into file path if necessary
        if params_spec and "{params}" not in path:
            name, ext = os.path.splitext(path)
            name += "_{params}"
            path = name + ext

        path = path.format(params=params_name)
        # Use a context manager so the handle is closed; the original
        # opened the file and leaked the handle.
        with open(path, "w") as fh:
            print(out + "\n", file=fh)
    else:
        print(
            "Dumping result with parameters from {}:".format(params_name),
            file=sys.stderr,
        )
        # file=None prints to stdout, matching the original behaviour.
        print(out + "\n", file=None)
Beispiel #22
0
def load_image_by_name(image_name, tag=None):
    """Load the docker image produced by `build-docker-image-<image_name>`,
    returning False when no prebuilt artifact can be located."""
    from taskgraph.generator import load_tasks_for_kind
    from taskgraph.optimize import IndexSearch
    from taskgraph.parameters import Parameters

    params = Parameters(
        strict=False,
        level=os.environ.get("MOZ_SCM_LEVEL", "3"),
    )
    image_task = load_tasks_for_kind(params, "docker-image")[
        f"build-docker-image-{image_name}"]
    task_id = IndexSearch().should_replace_task(
        image_task, {}, image_task.optimization.get("index-search", []))

    # A bare True/False means no concrete replacement task was found.
    if task_id in (True, False):
        print("Could not find artifacts for a docker image "
              "named `{image_name}`. Local commits and other changes "
              "in your checkout may cause this error. Try "
              "updating to a fresh checkout of mozilla-central "
              "to download image.".format(image_name=image_name))
        return False

    return load_image_by_task_id(task_id, tag)
Beispiel #23
0
def release_promotion_action(parameters, graph_config, input, task_group_id,
                             task_id, task):
    """Action callback: assemble release-promotion parameters for the given
    flavor (including partials, partner and EME-free configuration) and
    trigger a taskgraph decision with them.

    Reuses tasks from previous decision/action graphs and freezes the
    resulting parameters before calling ``taskgraph_decision``.

    Raises Exception when a version-bump flavor lacks a usable
    ``next_version`` or a partial-updates flavor lacks ``partial_updates``.
    """
    release_promotion_flavor = input['release_promotion_flavor']
    promotion_config = graph_config['release-promotion']['flavors'][
        release_promotion_flavor]
    release_history = {}
    product = promotion_config['product']

    next_version = str(input.get('next_version') or '')
    if promotion_config.get('version-bump', False):
        # We force str() the input, hence the 'None'
        if next_version in ['', 'None']:
            raise Exception(
                "`next_version` property needs to be provided for `{}` "
                "target.".format(release_promotion_flavor))

    if promotion_config.get('partial-updates', False):
        partial_updates = json.dumps(input.get('partial_updates', {}))
        if partial_updates == "{}":
            raise Exception(
                "`partial_updates` property needs to be provided for `{}`"
                "target.".format(release_promotion_flavor))
        balrog_prefix = product.title()
        # Exported for downstream tooling that reads the environment.
        os.environ['PARTIAL_UPDATES'] = partial_updates
        release_history = populate_release_history(
            balrog_prefix,
            parameters['project'],
            partial_updates=input['partial_updates'])

    target_tasks_method = promotion_config['target-tasks-method'].format(
        project=parameters['project'])
    # Action input overrides the flavor's configured values.
    rebuild_kinds = input.get('rebuild_kinds',
                              promotion_config.get('rebuild-kinds', []))
    do_not_optimize = input.get('do_not_optimize',
                                promotion_config.get('do-not-optimize', []))
    # Partner/EME-free repacks default on only for firefox on the
    # configured branches.
    release_enable_partners = input.get(
        'release_enable_partners', parameters['project'] in PARTNER_BRANCHES
        and product in ('firefox', ))
    release_enable_emefree = input.get(
        'release_enable_emefree', parameters['project'] in EMEFREE_BRANCHES
        and product in ('firefox', ))

    # make parameters read-write
    parameters = dict(parameters)
    # Build previous_graph_ids from ``previous_graph_ids``, ``pushlog_id``,
    # or ``revision``.
    previous_graph_ids = input.get('previous_graph_ids')
    if not previous_graph_ids:
        revision = input.get('revision')
        parameters['pushlog_id'] = parameters['pushlog_id'] or \
            find_hg_revision_pushlog_id(parameters, graph_config, revision)
        previous_graph_ids = [find_decision_task(parameters, graph_config)]

    # Download parameters from the first decision task
    # NOTE: this replaces the local `parameters` dict built above entirely;
    # only the artifact's contents (plus the overrides below) survive.
    parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
    # Download and combine full task graphs from each of the previous_graph_ids.
    # Sometimes previous relpro action tasks will add tasks, like partials,
    # that didn't exist in the first full_task_graph, so combining them is
    # important. The rightmost graph should take precedence in the case of
    # conflicts.
    combined_full_task_graph = {}
    for graph_id in previous_graph_ids:
        full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
        combined_full_task_graph.update(full_task_graph)
    _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
    parameters['existing_tasks'] = find_existing_tasks_from_previous_kinds(
        combined_full_task_graph, previous_graph_ids, rebuild_kinds)
    parameters['do_not_optimize'] = do_not_optimize
    parameters['target_tasks_method'] = target_tasks_method
    parameters['build_number'] = int(input['build_number'])
    parameters['next_version'] = next_version
    parameters['release_history'] = release_history
    parameters['release_type'] = promotion_config.get('release-type', '')
    parameters['release_eta'] = input.get('release_eta', '')
    parameters['release_enable_partners'] = release_enable_partners
    parameters['release_partners'] = input.get('release_partners')
    parameters['release_enable_emefree'] = release_enable_emefree
    parameters['release_product'] = product
    # When doing staging releases on try, we still want to re-use tasks from
    # previous graphs.
    parameters['optimize_target_tasks'] = True

    # Fetch partner config from the partner repos when it wasn't supplied
    # in the action input but partner/EME-free repacks are enabled.
    partner_config = input.get('release_partner_config')
    if not partner_config and (release_enable_emefree
                               or release_enable_partners):
        partner_url_config = get_partner_url_config(
            parameters,
            graph_config,
            enable_emefree=release_enable_emefree,
            enable_partners=release_enable_partners)
        github_token = get_token(parameters)
        partner_config = get_partner_config(partner_url_config, github_token)

    if input.get('release_partner_build_number'):
        parameters['release_partner_build_number'] = input[
            'release_partner_build_number']

    if partner_config:
        parameters['release_partner_config'] = fix_partner_config(
            partner_config)

    if input['version']:
        parameters['version'] = input['version']

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({'root': graph_config.root_dir}, parameters=parameters)
Beispiel #24
0
    def test_Parameters_check_extra(self):
        """An unknown key fails `check` in strict mode, passes otherwise."""
        strict = Parameters(xyz=10, **self.vals)
        self.assertRaises(ParameterMismatch, strict.check)

        lenient = Parameters(strict=False, xyz=10, **self.vals)
        lenient.check()  # should not raise
Beispiel #25
0
 def test_Parameters_check(self):
     """
     Supplying every gecko and comm parameter passes validation.
     """
     Parameters(**self.vals).check()  # should not raise
Beispiel #26
0
    def test_Parameters_check_extra(self):
        """An unknown key fails `check` in strict mode, passes otherwise."""
        strict = Parameters(xyz=10, **self.vals)
        self.assertRaises(Exception, strict.check)

        Parameters(strict=False, xyz=10, **self.vals).check()  # should not raise
Beispiel #27
0
    def test_Parameters_check_missing(self):
        """Missing required keys fail `check` in strict mode only."""
        empty = Parameters()
        self.assertRaises(Exception, empty.check)

        Parameters(strict=False).check()  # should not raise
Beispiel #28
0
 def test_Parameters_check(self):
     """A fully-populated Parameters instance passes `check`."""
     Parameters(**self.vals).check()  # should not raise
Beispiel #29
0
 def test_Parameters_get(self):
     """Item access returns the value supplied at construction."""
     params = Parameters(head_ref=10, level=20)
     self.assertEqual(params['head_ref'], 10)
Beispiel #30
0
 def test_Parameters_invalid_KeyError(self):
     """even if the value is present, if it's not a valid property, raise KeyError"""
     params = Parameters(xyz=10, strict=True, **self.vals)
     self.assertRaises(Exception, params.check)
Beispiel #31
0
    def test_Parameters_check_missing(self):
        """An empty strict Parameters fails `check`; non-strict passes."""
        empty = Parameters()
        self.assertRaises(ParameterMismatch, empty.check)

        Parameters(strict=False).check()  # should not raise
def release_promotion_action(parameters, input, task_group_id, task_id, task):
    """Action callback: assemble release-promotion parameters for the given
    flavor (including partial-update history for firefox/devedition) and
    trigger a taskgraph decision with them.

    Reuses tasks from previous decision/action graphs and freezes the
    resulting parameters before calling ``taskgraph_decision``.

    Raises Exception when a version-bump flavor lacks a usable
    ``next_version`` or a partial-updates flavor lacks ``partial_updates``.
    """
    release_promotion_flavor = input['release_promotion_flavor']
    promotion_config = RELEASE_PROMOTION_CONFIG[release_promotion_flavor]
    release_history = {}
    product = promotion_config['product']

    next_version = str(input.get('next_version') or '')
    if release_promotion_flavor in VERSION_BUMP_FLAVORS:
        # We force str() the input, hence the 'None'
        if next_version in ['', 'None']:
            raise Exception(
                "`next_version` property needs to be provided for %s "
                "targets." % ', '.join(VERSION_BUMP_FLAVORS))

    if product in ('firefox', 'devedition'):
        if release_promotion_flavor in PARTIAL_UPDATES_FLAVORS:
            partial_updates = json.dumps(input.get('partial_updates', {}))
            if partial_updates == "{}":
                raise Exception(
                    "`partial_updates` property needs to be provided for %s "
                    "targets." % ', '.join(PARTIAL_UPDATES_FLAVORS))
            balrog_prefix = product.title()
            # Exported for downstream tooling that reads the environment.
            os.environ['PARTIAL_UPDATES'] = partial_updates
            release_history = populate_release_history(
                balrog_prefix,
                parameters['project'],
                partial_updates=input['partial_updates'])

    # NOTE(review): redundant — promotion_config was already assigned to the
    # same value at the top of this function.
    promotion_config = RELEASE_PROMOTION_CONFIG[release_promotion_flavor]

    # Action input overrides the flavor's configured values.
    target_tasks_method = promotion_config['target_tasks_method'].format(
        project=parameters['project'])
    rebuild_kinds = input.get('rebuild_kinds',
                              promotion_config.get('rebuild_kinds', []))
    do_not_optimize = input.get('do_not_optimize',
                                promotion_config.get('do_not_optimize', []))

    # make parameters read-write
    parameters = dict(parameters)
    # Build previous_graph_ids from ``previous_graph_ids``, ``pushlog_id``,
    # or ``revision``.
    previous_graph_ids = input.get('previous_graph_ids')
    if not previous_graph_ids:
        revision = input.get('revision')
        parameters['pushlog_id'] = parameters['pushlog_id'] or \
            find_hg_revision_pushlog_id(parameters, revision)
        previous_graph_ids = [find_decision_task(parameters)]

    # Download parameters from the first decision task
    # NOTE: this replaces the local `parameters` dict built above entirely;
    # only the artifact's contents (plus the overrides below) survive.
    parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
    # Download and combine full task graphs from each of the previous_graph_ids.
    # Sometimes previous relpro action tasks will add tasks, like partials,
    # that didn't exist in the first full_task_graph, so combining them is
    # important. The rightmost graph should take precedence in the case of
    # conflicts.
    combined_full_task_graph = {}
    for graph_id in previous_graph_ids:
        full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
        combined_full_task_graph.update(full_task_graph)
    _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
    parameters['existing_tasks'] = find_existing_tasks_from_previous_kinds(
        combined_full_task_graph, previous_graph_ids, rebuild_kinds)
    parameters['do_not_optimize'] = do_not_optimize
    parameters['target_tasks_method'] = target_tasks_method
    parameters['build_number'] = int(input['build_number'])
    parameters['next_version'] = next_version
    parameters['release_history'] = release_history
    parameters['release_type'] = promotion_config.get('release_type', '')
    parameters['release_eta'] = input.get('release_eta', '')
    if input['version']:
        parameters['version'] = input['version']

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({}, parameters=parameters)
def release_promotion_action(parameters, graph_config, input, task_group_id,
                             task_id):
    """Action callback: assemble release-promotion parameters and trigger a
    taskgraph decision with them.

    Reuses tasks from previous decision/action graphs, validates the
    requested version against version.txt, and freezes the resulting
    parameters before calling ``taskgraph_decision``.

    Raises ValueError when the version from the input/tag does not match
    the one recorded in version.txt.
    """
    release_promotion_flavor = input['release_promotion_flavor']
    promotion_config = graph_config['release-promotion']['flavors'][
        release_promotion_flavor]

    target_tasks_method = promotion_config['target-tasks-method'].format(
        project=parameters['project'])
    # Action input takes precedence over the flavor's configured values.
    rebuild_kinds = input.get('rebuild_kinds') or promotion_config.get(
        'rebuild-kinds', [])
    do_not_optimize = input.get('do_not_optimize') or promotion_config.get(
        'do-not-optimize', [])

    # make parameters read-write
    parameters = dict(parameters)
    # Build previous_graph_ids from ``previous_graph_ids`` or ``revision``.
    previous_graph_ids = input.get('previous_graph_ids')
    if not previous_graph_ids:
        previous_graph_ids = [find_decision_task(parameters, graph_config)]

    # Download parameters from the first decision task
    parameters = get_artifact(previous_graph_ids[0], "public/parameters.yml")
    # Download and combine full task graphs from each of the previous_graph_ids.
    # Sometimes previous relpro action tasks will add tasks, like partials,
    # that didn't exist in the first full_task_graph, so combining them is
    # important. The rightmost graph should take precedence in the case of
    # conflicts.
    combined_full_task_graph = {}
    for graph_id in previous_graph_ids:
        full_task_graph = get_artifact(graph_id, "public/full-task-graph.json")
        combined_full_task_graph.update(full_task_graph)
    _, combined_full_task_graph = TaskGraph.from_json(combined_full_task_graph)
    parameters['existing_tasks'] = find_existing_tasks_from_previous_kinds(
        combined_full_task_graph, previous_graph_ids, rebuild_kinds)
    parameters['do_not_optimize'] = do_not_optimize
    parameters['target_tasks_method'] = target_tasks_method
    parameters['build_number'] = int(input['build_number'])
    # When doing staging releases on try, we still want to re-use tasks from
    # previous graphs.
    parameters['optimize_target_tasks'] = True
    parameters['shipping_phase'] = input['release_promotion_flavor']

    # Read version.txt once and reuse the result; the original called
    # read_version_file() a second time for the fallback value.
    version_in_file = read_version_file()
    version_string = input['version'] if input.get('version') else version_in_file
    if version_string != version_in_file:
        raise ValueError(
            "Version given in tag ({}) does not match the one in version.txt ({})"
            .format(version_string, version_in_file))
    parameters['version'] = version_string
    parameters['head_tag'] = 'v{}'.format(version_string)

    parameters['next_version'] = input['next_version']

    parameters['release_type'] = "release"

    parameters['pull_request_number'] = None
    parameters['tasks_for'] = 'action'

    # make parameters read-only
    parameters = Parameters(**parameters)

    taskgraph_decision({'root': graph_config.root_dir}, parameters=parameters)
Beispiel #34
0
 def test_Parameters_check(self):
     """Checking a fully-specified Parameters instance succeeds."""
     Parameters(**self.vals).check()  # should not raise