def _go_under_template(
    agent: TemplateObjectVariable,
    goal_object: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
    *,
    is_distal: bool,  # pylint:disable=unused-argument
) -> Phase1SituationTemplate:
    return Phase1SituationTemplate(
        f"go_under-{agent.handle}-under-{goal_object.handle}",
        salient_object_variables=[agent, goal_object],
        background_object_variables=background,
        actions=[
            Action(
                GO,
                argument_roles_to_fillers=[
                    (AGENT, agent),
                    (
                        GOAL,
                        Region(
                            goal_object, distance=PROXIMAL, direction=GRAVITATIONAL_DOWN
                        ),
                    ),
                ],
            )
        ],
        before_action_relations=[negate(on(goal_object, GROUND_OBJECT_TEMPLATE))],
        asserted_always_relations=[negate(on(goal_object, GROUND_OBJECT_TEMPLATE))],
        after_action_relations=[
            negate(on(goal_object, GROUND_OBJECT_TEMPLATE)),
            near(agent, goal_object),
        ],
        constraining_relations=flatten_relations(bigger_than(goal_object, agent)),
    )
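# --- Illustrative usage sketch (not part of the original source) ---
# A minimal sketch of how _go_under_template might be turned into curriculum
# instances, assuming the ADAM helpers standard_object, sampled, and
# phase1_instances behave as they do elsewhere in this file. BIRD and TABLE are
# ontology nodes chosen purely for illustration, and max_to_sample is arbitrary.
def _example_go_under_curriculum(language_generator):
    agent = standard_object("go_under_agent", BIRD)
    goal = standard_object("go_under_goal", TABLE)
    return phase1_instances(
        "go-under-example",
        sampled(
            _go_under_template(agent, goal, immutableset(), is_distal=True),
            ontology=GAILA_PHASE_1_ONTOLOGY,
            chooser=RandomChooser.for_seed(0),
            max_to_sample=5,
        ),
        language_generator=language_generator,
    )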
Example #2
def _under_template(
    figure: TemplateObjectVariable,
    ground: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
    *,
    is_training: bool,
    is_distal: bool,
    syntax_hints: Iterable[str] = immutableset(),  # avoid a mutable default argument
) -> Phase1SituationTemplate:
    handle = "training" if is_training else "testing"
    # TODO: this hack currently keeps the old implementation for English, which has not yet
    # solved https://github.com/isi-vista/adam/issues/802, and returns the new implementation
    # for Chinese, which does solve it.
    return Phase1SituationTemplate(
        f"preposition-{handle}-{figure.handle}-under-{ground.handle}",
        salient_object_variables=[figure, ground],
        background_object_variables=background,
        asserted_always_relations=[
            strictly_under(ground,
                           figure,
                           dist=DISTAL if is_distal else PROXIMAL)
        ],
        constraining_relations=[bigger_than(ground, figure)],
        gazed_objects=[figure],
        syntax_hints=syntax_hints,
    )
Example #3
def _under_template(
    figure: TemplateObjectVariable,
    ground: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
    *,
    is_training: bool,
    is_distal: bool,
    syntax_hints: Iterable[str] = immutableset(),
    background_relations: Iterable[Relation[TemplateObjectVariable]] = immutableset(),
) -> Phase1SituationTemplate:
    handle = "training" if is_training else "testing"
    relations = [
        negate(on(figure, GROUND_OBJECT_TEMPLATE)),
        strictly_under(ground, figure, dist=DISTAL if is_distal else PROXIMAL),
    ]
    relations.extend(background_relations)
    return Phase1SituationTemplate(
        f"preposition-{handle}-{figure.handle}-under-{ground.handle}",
        salient_object_variables=[figure, ground],
        background_object_variables=background,
        asserted_always_relations=flatten_relations(relations),
        constraining_relations=[bigger_than(ground, figure)],
        gazed_objects=[figure],
        syntax_hints=syntax_hints,
    )
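# --- Illustrative call (not part of the original source) ---
# A sketch of the background_relations hook on this variant of _under_template:
# it lets callers assert extra relations, here pinning the ground object to the
# ground while the figure is kept off it. BALL and TABLE are ontology nodes
# assumed purely for illustration.
_under_figure = standard_object("under_figure", BALL)
_under_ground = standard_object("under_ground", TABLE)
_under_with_background = _under_template(
    _under_figure,
    _under_ground,
    immutableset(),
    is_training=True,
    is_distal=False,
    background_relations=[on(_under_ground, GROUND_OBJECT_TEMPLATE)],
)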
def _put_on_body_part_template(
    # X puts Y on body part
    agent: TemplateObjectVariable,
    theme: TemplateObjectVariable,
    goal_reference: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
) -> Phase1SituationTemplate:
    return Phase1SituationTemplate(
        f"{agent.handle}-puts-{theme.handle}-on-{goal_reference.handle}",
        salient_object_variables=[agent, theme, goal_reference],
        background_object_variables=background,
        actions=[
            Action(
                PUT,
                argument_roles_to_fillers=[
                    (AGENT, agent),
                    (THEME, theme),
                    (
                        GOAL,
                        Region(
                            goal_reference,
                            distance=EXTERIOR_BUT_IN_CONTACT,
                            direction=GRAVITATIONAL_UP,
                        ),
                    ),
                ],
            )
        ],
        constraining_relations=flatten_relations(
            bigger_than([agent, goal_reference], theme)
        ),
        asserted_always_relations=flatten_relations(has(agent, goal_reference)),
    )
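# --- Illustrative call (not part of the original source) ---
# A sketch of instantiating _put_on_body_part_template: the agent puts a theme
# on one of its own body parts, which the has(agent, goal_reference) relation
# above is meant to enforce. MOM, HAT, and HEAD are ontology nodes assumed for
# illustration; the curriculum code may well use a dedicated body-part object
# helper rather than standard_object here.
_put_on_head_situation_template = _put_on_body_part_template(
    standard_object("putter", MOM),
    standard_object("worn_theme", HAT),
    standard_object("body_part_goal", HEAD),
    immutableset(),
)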
def _big_x_template(theme: TemplateObjectVariable,
                    noise_objects: Optional[int]) -> Phase1SituationTemplate:
    learner = learner_template_factory()
    computed_background = [learner]
    computed_background.extend(make_noise_objects(noise_objects))
    return Phase1SituationTemplate(
        f"big-{theme.handle}",
        salient_object_variables=[theme],
        background_object_variables=computed_background,
        asserted_always_relations=[bigger_than(theme, learner)],
    )
def _short_x_template(theme: TemplateObjectVariable,
                      noise_objects: Optional[int]) -> Phase1SituationTemplate:
    learner = learner_template_factory()
    computed_background = [learner]
    computed_background.extend(make_noise_objects(noise_objects))

    # TODO: This difference should be an axis size but we can't yet
    # implement that. See: https://github.com/isi-vista/adam/issues/832
    return Phase1SituationTemplate(
        f"tall-{theme.handle}",
        salient_object_variables=[theme],
        background_object_variables=computed_background,
        asserted_always_relations=[bigger_than(learner, theme)],
        syntax_hints=[USE_VERTICAL_MODIFIERS],
    )
def _go_in_template(
    agent: TemplateObjectVariable,
    goal_object: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
) -> Phase1SituationTemplate:
    return Phase1SituationTemplate(
        f"go_in-{agent.handle}-in-{goal_object.handle}",
        salient_object_variables=[agent, goal_object],
        background_object_variables=background,
        actions=[
            Action(
                GO,
                argument_roles_to_fillers=[
                    (AGENT, agent),
                    (GOAL, Region(goal_object, distance=INTERIOR)),
                ],
            )
        ],
        constraining_relations=flatten_relations(bigger_than(goal_object, agent)),
        after_action_relations=[inside(agent, goal_object)],
    )
def _put_in_template(
    agent: TemplateObjectVariable,
    theme: TemplateObjectVariable,
    goal_reference: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
) -> Phase1SituationTemplate:
    return Phase1SituationTemplate(
        f"{agent.handle}-puts-{theme.handle}-in-{goal_reference.handle}",
        salient_object_variables=[agent, theme, goal_reference],
        background_object_variables=background,
        actions=[
            Action(
                PUT,
                argument_roles_to_fillers=[
                    (AGENT, agent),
                    (THEME, theme),
                    (GOAL, Region(goal_reference, distance=INTERIOR)),
                ],
            )
        ],
        constraining_relations=flatten_relations(bigger_than(goal_reference, theme)),
    )
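# --- Illustrative sketch (not part of the original source) ---
# A sketch of enumerating concrete instantiations of _put_in_template, assuming
# all_possible from the ADAM template machinery; MOM, BALL, and BOX are
# ontology nodes picked only for illustration. The constraining relation
# bigger_than(goal_reference, theme) is intended to rule out instantiations
# where the container is not larger than the theme.
_put_in_situations = all_possible(
    _put_in_template(
        standard_object("put_agent", MOM),
        standard_object("put_theme", BALL),
        standard_object("put_goal", BOX),
        immutableset(),
    ),
    ontology=GAILA_PHASE_1_ONTOLOGY,
    chooser=RandomChooser.for_seed(0),
)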
def test_big_ball():
    ball1 = situation_object(BALL, debug_handle="ball_0")
    ball2 = situation_object(BALL, debug_handle="ball_1")

    ball_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[ball2, ball1],
        always_relations=[bigger_than(ball1, ball2)],
    )

    # Both relation slots have the same ontology type (BALL), which is what should
    # yield the *_SAME_TYPE relations checked below.
    assert (ball_situation.always_relations[0].first_slot.ontology_node ==
            ball_situation.always_relations[0].second_slot.ontology_node)

    ball_perception = _PERCEPTION_GENERATOR.generate_perception(
        ball_situation, chooser=RandomChooser.for_seed(0))

    perceived_objects = ball_perception.frames[0].perceived_objects
    object_handles = set(obj.debug_handle for obj in perceived_objects)
    assert object_handles == {"**ball_0", "**ball_1", "the ground"}
    assert any(relation.relation_type == BIGGER_THAN_SAME_TYPE
               for relation in ball_perception.frames[0].relations)
    assert any(relation.relation_type == SMALLER_THAN_SAME_TYPE
               for relation in ball_perception.frames[0].relations)
def _fly_under_template(
    # A bird flies under a chair
    agent: TemplateObjectVariable,
    object_in_path: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
) -> Phase1SituationTemplate:
    return Phase1SituationTemplate(
        f"{agent.handle}-flies-under-{object_in_path.handle}",
        salient_object_variables=[agent, object_in_path],
        background_object_variables=background,
        actions=[
            Action(
                FLY,
                argument_roles_to_fillers=[(AGENT, agent)],
                during=DuringAction(
                    at_some_point=flatten_relations(strictly_above(object_in_path, agent))
                ),
            )
        ],
        asserted_always_relations=[negate(on(object_in_path, GROUND_OBJECT_TEMPLATE))],
        before_action_relations=[negate(on(object_in_path, GROUND_OBJECT_TEMPLATE))],
        after_action_relations=[negate(on(object_in_path, GROUND_OBJECT_TEMPLATE))],
        constraining_relations=flatten_relations(bigger_than(object_in_path, agent)),
    )
Example #11
def do_object_on_table_test(
    object_type_to_match: OntologyNode,
    object_schema: ObjectStructuralSchema,
    negative_object_ontology_node: OntologyNode,
):
    """
    Tests the `PerceptionGraphMatcher` can match simple objects.
    """
    # we create four situations:
    # a object_to_match above or under a table with color red or blue
    color = color_variable("color")
    object_to_match = object_variable(
        debug_handle=object_type_to_match.handle,
        root_node=object_type_to_match,
        added_properties=[color],
    )
    table = standard_object("table_0", TABLE)

    object_on_table_template = Phase1SituationTemplate(
        "object_to_match-on-table",
        salient_object_variables=[object_to_match, table],
        asserted_always_relations=[
            bigger_than(table, object_to_match),
            on(object_to_match, table),
        ],
    )

    object_under_table_template = Phase1SituationTemplate(
        "object_to_match-under-table",
        salient_object_variables=[object_to_match, table],
        asserted_always_relations=[
            bigger_than(table, object_to_match),
            above(table, object_to_match),
        ],
    )

    # We test that a perceptual pattern for "object_to_match" matches in all four cases.
    object_to_match_pattern = PerceptionGraphPattern.from_schema(
        object_schema, perception_generator=GAILA_PHASE_1_PERCEPTION_GENERATOR)

    situations_with_object_to_match = chain(
        all_possible_test(object_on_table_template),
        all_possible_test(object_under_table_template),
    )

    for (_,
         situation_with_object) in enumerate(situations_with_object_to_match):
        perception = GAILA_PHASE_1_PERCEPTION_GENERATOR.generate_perception(
            situation_with_object, chooser=RandomChooser.for_seed(0))
        perception_graph = PerceptionGraph.from_frame(perception.frames[0])
        # perception_graph.render_to_file(
        #     f"object_to_match {idx}", out_dir / f"object_to_match-{idx}.pdf"
        # )
        # object_to_match_pattern.render_to_file(
        #     f"object_to_match pattern", out_dir / "object_to_match_pattern.pdf"
        # )
        matcher = object_to_match_pattern.matcher(perception_graph,
                                                  match_mode=MatchMode.OBJECT)
        # debug_matching = matcher.debug_matching(
        #    use_lookahead_pruning=False, render_match_to=Path("/Users/gabbard/tmp")
        # )
        result = any(matcher.matches(use_lookahead_pruning=False))
        if not result:
            return False

    # Now let's create the same situations, but substitute a negative_object for an object_to_match.
    negative_object = object_variable(
        debug_handle=negative_object_ontology_node.handle,
        root_node=negative_object_ontology_node,
        added_properties=[color],
    )
    negative_object_on_table_template = Phase1SituationTemplate(
        "negative_object-on-table",
        salient_object_variables=[negative_object, table],
        asserted_always_relations=[
            bigger_than(table, negative_object),
            on(negative_object, table),
        ],
    )

    negative_object_under_table_template = Phase1SituationTemplate(
        "negative_object-under-table",
        salient_object_variables=[negative_object, table],
        asserted_always_relations=[
            bigger_than(table, negative_object),
            above(table, negative_object),
        ],
    )

    situations_with_negative_object = chain(
        all_possible_test(negative_object_on_table_template),
        all_possible_test(negative_object_under_table_template),
    )

    # The pattern should now fail to match.
    for situation_with_negative_object in situations_with_negative_object:
        perception = GAILA_PHASE_1_PERCEPTION_GENERATOR.generate_perception(
            situation_with_negative_object, chooser=RandomChooser.for_seed(0))
        perception_graph = PerceptionGraph.from_frame(perception.frames[0])
        if any(
                object_to_match_pattern.matcher(
                    perception_graph, match_mode=MatchMode.OBJECT).matches(
                        use_lookahead_pruning=True)):
            return False
    return True
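# --- Hypothetical invocation (not part of the original source) ---
# A sketch of how do_object_on_table_test might be driven from a concrete test,
# assuming the ontology exposes object schemata via structural_schemata; BALL
# as the positive object and BOX as the negative object are arbitrary choices.
def test_ball_on_table():
    assert do_object_on_table_test(
        BALL,
        next(iter(GAILA_PHASE_1_ONTOLOGY.structural_schemata(BALL))),
        BOX,
    )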
def make_spin_tall_short_curriculum(  # pylint: disable=unused-argument
    num_samples: Optional[int],
    noise_objects: Optional[int],
    language_generator: LanguageGenerator[HighLevelSemanticsSituation,
                                          LinearizedDependencyTree],
) -> Phase1InstanceGroup:
    # "Mom spins a tall chair"
    # We generate situations directly since templates fail to generate plurals.

    learner = SituationObject.instantiate_ontology_node(
        ontology_node=LEARNER,
        debug_handle=LEARNER.handle,
        ontology=GAILA_PHASE_1_ONTOLOGY,
    )
    situations = []
    for agent_ontology_node in [MOM, DAD, BABY, DOG]:
        agent = SituationObject.instantiate_ontology_node(
            ontology_node=agent_ontology_node,
            debug_handle=agent_ontology_node.handle,
            ontology=GAILA_PHASE_1_ONTOLOGY,
        )
        for _object in [CHAIR, TABLE]:
            theme = SituationObject.instantiate_ontology_node(
                ontology_node=_object,
                debug_handle=_object.handle,
                ontology=GAILA_PHASE_1_ONTOLOGY,
            )
            other_objs = [
                SituationObject.instantiate_ontology_node(
                    ontology_node=_object,
                    debug_handle=_object.handle + f"_{i}",
                    ontology=GAILA_PHASE_1_ONTOLOGY,
                ) for i in range(3)
            ]
            computed_background = [learner]
            computed_background.extend(other_objs)

            # Tall and short
            for relation_list in [
                [bigger_than(learner, theme),
                 bigger_than(other_objs, theme)],
                [bigger_than(theme, learner),
                 bigger_than(theme, other_objs)],
            ]:
                situations.append(
                    HighLevelSemanticsSituation(
                        ontology=GAILA_PHASE_1_ONTOLOGY,
                        salient_objects=[agent, theme],
                        other_objects=computed_background,
                        actions=[
                            Action(
                                SPIN,
                                argument_roles_to_fillers=[
                                    (AGENT, agent),
                                    (THEME, theme),
                                ],
                            )
                        ],
                        always_relations=relation_list,
                        syntax_hints=[USE_VERTICAL_MODIFIERS],
                    ))

    return phase1_instances("Tall - Short Curriculum",
                            situations,
                            language_generator=language_generator)
def make_eat_big_small_curriculum(  # pylint: disable=unused-argument
    num_samples: Optional[int],
    noise_objects: Optional[int],
    language_generator: LanguageGenerator[HighLevelSemanticsSituation,
                                          LinearizedDependencyTree],
) -> Phase1InstanceGroup:
    # "Mom eats a big cookie"
    # We generate situations directly since templates fail to generate plurals.

    learner = SituationObject.instantiate_ontology_node(
        ontology_node=LEARNER,
        debug_handle=LEARNER.handle,
        ontology=GAILA_PHASE_1_ONTOLOGY,
    )
    situations = []

    for eater_ontology_node in [MOM, DAD, BABY, DOG]:
        eater = SituationObject.instantiate_ontology_node(
            ontology_node=eater_ontology_node,
            debug_handle=eater_ontology_node.handle,
            ontology=GAILA_PHASE_1_ONTOLOGY,
        )
        for _object in [COOKIE, WATERMELON]:
            object_to_eat = SituationObject.instantiate_ontology_node(
                ontology_node=_object,
                debug_handle=_object.handle,
                ontology=GAILA_PHASE_1_ONTOLOGY,
            )
            other_edibles = [
                SituationObject.instantiate_ontology_node(
                    ontology_node=_object,
                    debug_handle=_object.handle + f"_{i}",
                    ontology=GAILA_PHASE_1_ONTOLOGY,
                ) for i in range(3)
            ]
            computed_background = [learner]
            computed_background.extend(other_edibles)

            # Big and small
            for relation_list in [
                [
                    bigger_than(object_to_eat, learner),
                    bigger_than(object_to_eat, other_edibles),
                ],
                [
                    bigger_than(learner, object_to_eat),
                    bigger_than(other_edibles, object_to_eat),
                ],
            ]:
                situations.append(
                    HighLevelSemanticsSituation(
                        ontology=GAILA_PHASE_1_ONTOLOGY,
                        salient_objects=[eater, object_to_eat],
                        other_objects=computed_background,
                        actions=[
                            Action(
                                EAT,
                                argument_roles_to_fillers=[
                                    (AGENT, eater),
                                    (PATIENT, object_to_eat),
                                ],
                            )
                        ],
                        always_relations=relation_list,
                    ))

    return phase1_instances("Big - Small Curriculum",
                            situations,
                            language_generator=language_generator)
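# --- Illustrative sketch (not part of the original source) ---
# A sketch of inspecting a generated curriculum, assuming the instance group
# returned by phase1_instances yields (situation, linguistic_description,
# perception) triples from instances() and that LinearizedDependencyTree
# exposes its surface string via as_token_sequence(); both are assumptions
# about the ADAM API rather than guarantees.
def _print_curriculum_descriptions(language_generator):
    curriculum = make_eat_big_small_curriculum(None, None, language_generator)
    for (_situation, linguistic_description, _perception) in curriculum.instances():
        print(" ".join(linguistic_description.as_token_sequence()))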