def test_objects_in_something_not_implicitly_grounded():
    box = situation_object(ontology_node=BOX)
    ball = situation_object(ontology_node=BALL)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[box, ball],
        always_relations=[
            Relation(
                IN_REGION,
                ball,
                Region(
                    box,
                    distance=PROXIMAL,
                    direction=Direction(
                        positive=True,
                        relative_to_axis=HorizontalAxisOfObject(box, index=0),
                    ),
                ),
            )
        ],
    )

    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )
    first_frame = perception.frames[0]
    ball_perception = perception_with_handle(first_frame, "**ball_0")
    ground_perception = perception_with_handle(first_frame, "the ground")
    box_perception = perception_with_handle(first_frame, "**box_0")

    first_frame_relations = first_frame.relations

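    # As written, the ball is only PROXIMAL to (beside) the box rather than inside it,
    # so both objects are still expected to rest on the ground.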
    assert on(ball_perception, ground_perception)[0] in first_frame_relations
    assert on(box_perception, ground_perception)[0] in first_frame_relations
Example #2
def _go_under_template(
    agent: TemplateObjectVariable,
    goal_object: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
    *,
    is_distal: bool,  # pylint:disable=unused-argument
) -> Phase1SituationTemplate:
    return Phase1SituationTemplate(
        f"go_under-{agent.handle}-under-{goal_object.handle}",
        salient_object_variables=[agent, goal_object],
        background_object_variables=background,
        actions=[
            Action(
                GO,
                argument_roles_to_fillers=[
                    (AGENT, agent),
                    (
                        GOAL,
                        Region(
                            goal_object, distance=PROXIMAL, direction=GRAVITATIONAL_DOWN
                        ),
                    ),
                ],
            )
        ],
        before_action_relations=[negate(on(goal_object, GROUND_OBJECT_TEMPLATE))],
        asserted_always_relations=[negate(on(goal_object, GROUND_OBJECT_TEMPLATE))],
        after_action_relations=[
            negate(on(goal_object, GROUND_OBJECT_TEMPLATE)),
            near(agent, goal_object),
        ],
        constraining_relations=flatten_relations(bigger_than(goal_object, agent)),
    )
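# A minimal usage sketch (not from the original source): instantiate the template
# above and sample one concrete situation, mirroring the sampling pattern used in
# test_before_after_relations_asserted further down. BIRD is assumed to be a
# GAILA_PHASE_1_ONTOLOGY node; the variable names are illustrative.
def _sketch_sample_go_under():
    bird = object_variable("bird", root_node=BIRD)
    table = object_variable("table", root_node=TABLE)
    template = _go_under_template(bird, table, background=[], is_distal=False)
    return tuple(
        sampled(
            template,
            ontology=GAILA_PHASE_1_ONTOLOGY,
            chooser=RandomChooser.for_seed(0),
            max_to_sample=1,
        )
    )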
def test_perceive_explicit_relations():
    # we want to test that relations explicitly called out in the situation are perceived.
    # Such relations fall into three buckets:
    # those which hold before an action,
    # those which hold after an action,
    # and those which hold both before and after an action.
    # To test all three of these at once, we use a situation where
    # (a) Mom is putting a ball on a table
    # (b) before the action the ball is far from a box,
    # (c) but after the action the ball is near the box,
    # (d) throughout the action the box is on the table.
    mom = situation_object(MOM)
    ball = situation_object(BALL)
    box = situation_object(BOX)
    table = situation_object(TABLE)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[mom, box, ball, table],
        always_relations=[on(box, table)],
        before_action_relations=[far(ball, box)],
        after_action_relations=[near(ball, box)],
        actions=[
            Action(
                PUT,
                argument_roles_to_fillers=[
                    (AGENT, mom),
                    (THEME, ball),
                    (
                        GOAL,
                        Region(
                            reference_object=table,
                            distance=EXTERIOR_BUT_IN_CONTACT,
                            direction=GRAVITATIONAL_UP,
                        ),
                    ),
                ],
            )
        ],
    )
    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )

    ball_perception = perception_with_handle(perception.frames[0], "**ball_0")
    box_perception = perception_with_handle(perception.frames[0], "**box_0")
    table_perception = perception_with_handle(perception.frames[0], "**table_0")

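    # Bucket checks: box-on-table holds in both frames, far(ball, box) only before
    # the action, and near(ball, box) only after it.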
    assert only(on(box_perception, table_perception)) in perception.frames[0].relations
    assert only(on(box_perception, table_perception)) in perception.frames[1].relations

    assert only(far(ball_perception, box_perception)) in perception.frames[0].relations
    assert (
        only(far(ball_perception, box_perception)) not in perception.frames[1].relations
    )

    assert (
        only(near(ball_perception, box_perception)) not in perception.frames[0].relations
    )
    assert only(near(ball_perception, box_perception)) in perception.frames[1].relations
Example #4
def _under_template(
    figure: TemplateObjectVariable,
    ground: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
    *,
    is_training: bool,
    is_distal: bool,
    syntax_hints: Iterable[str] = immutableset(),
    background_relations: Iterable[Relation[Any]] = immutableset(),
) -> Phase1SituationTemplate:
    handle = "training" if is_training else "testing"
    relations = [
        negate(on(figure, GROUND_OBJECT_TEMPLATE)),
        strictly_under(ground, figure, dist=DISTAL if is_distal else PROXIMAL),
    ]
    relations.extend(background_relations)  # type: ignore
    return Phase1SituationTemplate(
        f"preposition-{handle}-{figure.handle}-under-{ground.handle}",
        salient_object_variables=[figure, ground],
        background_object_variables=background,
        asserted_always_relations=flatten_relations(relations),
        constraining_relations=[bigger_than(ground, figure)],
        gazed_objects=[figure],
        syntax_hints=syntax_hints,
    )
def test_object_not_on_ground():
    """
    Intended to test that one can specify an object is not on the ground
    """
    table = situation_object(TABLE)
    ground = situation_object(GROUND)
    perception = _PERCEPTION_GENERATOR.generate_perception(
        HighLevelSemanticsSituation(
            ontology=GAILA_PHASE_1_ONTOLOGY,
            salient_objects=immutableset([table]),
            other_objects=immutableset([ground]),
            always_relations=flatten_relations(negate(on(table, ground))),
        ),
        chooser=RandomChooser.for_seed(0),
    )
    frame = perception.frames[0]
    relations = frame.relations
    table_perception = perception_with_handle(frame, "**table_0")
    ground_perception = perception_with_handle(frame, "the ground")
    assert not any(
        relation.relation_type == IN_REGION
        and relation.first_slot == table_perception
        and isinstance(relation.second_slot, Region)
        and relation.second_slot.reference_object == ground_perception
        and relation.second_slot.distance == EXTERIOR_BUT_IN_CONTACT
        for relation in relations
    )
Example #6
def test_before_after_relations_asserted():
    ball = object_variable("ball", root_node=BALL)
    box = object_variable("box", root_node=BOX)
    ground = object_variable("ground", root_node=GROUND)

    template_action = Phase1SituationTemplate(
        "Before/After Relation",
        salient_object_variables=[ball, box],
        background_object_variables=[ground],
        actions=[
            Action(
                ROLL,
                argument_roles_to_fillers=[(AGENT, ball)],
                auxiliary_variable_bindings=[(ROLL_SURFACE_AUXILIARY, ground)],
            )
        ],
        before_action_relations=flatten_relations([on(ball, box)]),
        after_action_relations=flatten_relations([far(ball, box)]),
    )

    situation_with_relations = tuple(
        sampled(
            template_action,
            ontology=GAILA_PHASE_1_ONTOLOGY,
            chooser=RandomChooser.for_seed(0),
            max_to_sample=1,
        ))

    assert situation_with_relations[0].before_action_relations
    assert situation_with_relations[0].after_action_relations
def test_relations_between_objects_and_ground():
    # person_put_ball_on_table
    person = situation_object(ontology_node=PERSON)
    ball = situation_object(ontology_node=BALL)
    table = situation_object(ontology_node=TABLE)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[person, ball, table],
        actions=[
            # What is the best way of representing the destination in the high-level semantics?
            # Here we represent it as indicating a relation which should be true.
            Action(
                PUT,
                (
                    (AGENT, person),
                    (THEME, ball),
                    (
                        GOAL,
                        Region(
                            reference_object=table,
                            distance=EXTERIOR_BUT_IN_CONTACT,
                            direction=GRAVITATIONAL_UP,
                        ),
                    ),
                ),
            )
        ],
    )

    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )
    first_frame = perception.frames[0]
    ball_perception = perception_with_handle(first_frame, "**ball_0")
    ground_perception = perception_with_handle(first_frame, "the ground")
    table_perception = perception_with_handle(first_frame, "**table_0")

    first_frame_relations = first_frame.relations
    second_frame_relations = perception.frames[1].relations

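    # Before the PUT action the ball rests on the ground; afterwards it is on the
    # table instead, while the table stays on the ground in both frames.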
    assert on(ball_perception, ground_perception)[0] in first_frame_relations
    assert on(ball_perception, ground_perception)[0] not in second_frame_relations
    assert on(table_perception, ground_perception)[0] in first_frame_relations
    assert on(table_perception, ground_perception)[0] in second_frame_relations
def test_grounding_of_unsupported_objects():
    box = situation_object(ontology_node=BOX)
    ball = situation_object(ontology_node=BALL)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[box, ball],
        always_relations=[Relation(IN_REGION, ball, Region(box, distance=INTERIOR))],
    )

    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )
    first_frame = perception.frames[0]
    ball_perception = perception_with_handle(first_frame, "**ball_0")
    ground_perception = perception_with_handle(first_frame, "the ground")
    box_perception = perception_with_handle(first_frame, "**box_0")

    first_frame_relations = first_frame.relations

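    # Because the ball is inside the box, it is not implicitly supported by the
    # ground; the box itself still is.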
    assert on(ball_perception, ground_perception)[0] not in first_frame_relations
    assert on(box_perception, ground_perception)[0] in first_frame_relations
def test_dynamic_prepositions_implicit_grounding():
    truck = situation_object(ontology_node=TRUCK)
    baby = situation_object(ontology_node=BABY)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[baby, truck],
        actions=[Action(FALL, argument_roles_to_fillers=[(THEME, baby)])],
        after_action_relations=[on(baby, truck)],
    )

    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )
    first_frame = perception.frames[0]
    baby_perception = perception_with_handle(first_frame, "**baby_0")
    ground_perception = perception_with_handle(first_frame, "the ground")
    truck_perception = perception_with_handle(first_frame, "**truck_0")

    first_frame_relations = first_frame.relations
    second_frame_relations = perception.frames[1].relations

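    # After falling, the baby is on the truck rather than on the ground, while the
    # truck stays on the ground in both frames.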
    assert on(baby_perception, truck_perception)[0] not in first_frame_relations
    assert on(baby_perception, truck_perception)[0] in second_frame_relations
    assert on(baby_perception, ground_perception)[0] not in second_frame_relations
    assert on(truck_perception, ground_perception)[0] in first_frame_relations
    assert on(truck_perception, ground_perception)[0] in second_frame_relations
Example #10
def _fly_under_template(
    # A bird flies under a chair
    agent: TemplateObjectVariable,
    object_in_path: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
) -> Phase1SituationTemplate:
    return Phase1SituationTemplate(
        f"{agent.handle}-flies-under-{object_in_path.handle}",
        salient_object_variables=[agent, object_in_path],
        background_object_variables=background,
        actions=[
            Action(
                FLY,
                argument_roles_to_fillers=[(AGENT, agent)],
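                # At some point mid-flight the object is strictly above the agent,
                # i.e. the agent passes under it.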
                during=DuringAction(
                    at_some_point=flatten_relations(strictly_above(object_in_path, agent))
                ),
            )
        ],
        asserted_always_relations=[negate(on(object_in_path, GROUND_OBJECT_TEMPLATE))],
        before_action_relations=[negate(on(object_in_path, GROUND_OBJECT_TEMPLATE))],
        after_action_relations=[negate(on(object_in_path, GROUND_OBJECT_TEMPLATE))],
        constraining_relations=flatten_relations(bigger_than(object_in_path, agent)),
    )
Example #11
def _on_template(
    figure: TemplateObjectVariable,
    ground: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
    *,
    is_training: bool,
) -> Phase1SituationTemplate:
    handle = "training" if is_training else "testing"
    return Phase1SituationTemplate(
        f"preposition-{handle}-{figure.handle}-on-{ground.handle}",
        salient_object_variables=[figure, ground],
        background_object_variables=background,
        asserted_always_relations=[on(figure, ground)],
        gazed_objects=[figure],
    )
def test_liquid_in_and_out_of_container():
    juice = situation_object(JUICE)
    box = situation_object(ontology_node=BOX)
    table = situation_object(ontology_node=TABLE)
    two_d_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[juice, table],
        always_relations=[on(juice, table)],
    )
    two_d_perception = _PERCEPTION_GENERATOR.generate_perception(
        two_d_situation, chooser=RandomChooser.for_seed(0)
    )

    three_d_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[juice, box],
        always_relations=[
            Relation(IN_REGION, juice, Region(box, distance=INTERIOR, direction=None))
        ],
    )
    three_d_perception = _PERCEPTION_GENERATOR.generate_perception(
        three_d_situation, chooser=RandomChooser.for_seed(0)
    )

    two_perceived_objects = two_d_perception.frames[0].perceived_objects
    two_object_handles = set(obj.debug_handle for obj in two_perceived_objects)
    assert all(handle in two_object_handles for handle in {"**juice_0", "**table_0"})
    three_perceived_objects = three_d_perception.frames[0].perceived_objects
    three_object_handles = set(obj.debug_handle for obj in three_perceived_objects)
    assert all(handle in three_object_handles for handle in {"**juice_0", "**box_0"})

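    # Juice spread on a surface is perceived as two-dimensional, whereas juice
    # held inside a container is not.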
    assert any(
        isinstance(p, HasBinaryProperty)
        and perception_with_handle(two_d_perception.frames[0], "**juice_0")
        == p.perceived_object
        and p.binary_property == TWO_DIMENSIONAL
        for p in two_d_perception.frames[0].property_assertions
    )
    assert not any(
        isinstance(p, HasBinaryProperty)
        and perception_with_handle(three_d_perception.frames[0], "**juice_0")
        == p.perceived_object
        and p.binary_property == TWO_DIMENSIONAL
        for p in three_d_perception.frames[0].property_assertions
    )
Example #13
def _on_template(
    figure: TemplateObjectVariable,
    ground: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
    *,
    is_training: bool,
    background_relations: Iterable[Relation[Any]] = immutableset(),
) -> Phase1SituationTemplate:
    handle = "training" if is_training else "testing"
    relations = [on(figure, ground)]
    relations.extend(background_relations)  # type: ignore
    return Phase1SituationTemplate(
        f"preposition-{handle}-{figure.handle}-on-{ground.handle}",
        salient_object_variables=[figure, ground],
        background_object_variables=background,
        asserted_always_relations=flatten_relations(relations),
        gazed_objects=[figure],
    )
Example #14
def _jump_over_template(
    # "Mom jumps over a ball"
    agent: TemplateObjectVariable,
    object_in_path: TemplateObjectVariable,
    background: Iterable[TemplateObjectVariable],
) -> Phase1SituationTemplate:
    return Phase1SituationTemplate(
        f"{agent.handle}-jumps-over-{object_in_path.handle}",
        salient_object_variables=[agent, object_in_path],
        background_object_variables=background,
        actions=[
            Action(
                JUMP,
                argument_roles_to_fillers=[(AGENT, agent)],
                during=DuringAction(
                    at_some_point=flatten_relations(
                        strictly_above(agent, object_in_path)
                    ),
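                    # The agent's path passes VIA a region distally above the
                    # object before coming back down to the ground.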
                    objects_to_paths=[
                        (
                            agent,
                            SpatialPath(
                                operator=VIA,
                                reference_source_object=Region(
                                    object_in_path,
                                    direction=GRAVITATIONAL_UP,
                                    distance=DISTAL,
                                ),
                                reference_destination_object=GROUND_OBJECT_TEMPLATE,
                            ),
                        )
                    ],
                ),
                auxiliary_variable_bindings=[
                    (JUMP_INITIAL_SUPPORTER_AUX, GROUND_OBJECT_TEMPLATE)
                ],
            )
        ],
        asserted_always_relations=[negate(on(agent, GROUND_OBJECT_TEMPLATE))],
    )
Example #15
def do_object_on_table_test(
    object_type_to_match: OntologyNode,
    object_schema: ObjectStructuralSchema,
    negative_object_ontology_node: OntologyNode,
):
    """
    Tests that the `PerceptionGraphMatcher` can match simple objects.
    """
    # we create four situations:
    # an object_to_match on or under a table, with color red or blue
    color = color_variable("color")
    object_to_match = object_variable(
        debug_handle=object_type_to_match.handle,
        root_node=object_type_to_match,
        added_properties=[color],
    )
    table = standard_object("table_0", TABLE)

    object_on_table_template = Phase1SituationTemplate(
        "object_to_match-on-table",
        salient_object_variables=[object_to_match, table],
        asserted_always_relations=[
            bigger_than(table, object_to_match),
            on(object_to_match, table),
        ],
    )

    object_under_table_template = Phase1SituationTemplate(
        "object_to_match-under-table",
        salient_object_variables=[object_to_match, table],
        asserted_always_relations=[
            bigger_than(table, object_to_match),
            above(table, object_to_match),
        ],
    )

    # We test that a perceptual pattern for "object_to_match" matches in all four cases.
    object_to_match_pattern = PerceptionGraphPattern.from_schema(
        object_schema, perception_generator=GAILA_PHASE_1_PERCEPTION_GENERATOR)

    situations_with_object_to_match = chain(
        all_possible_test(object_on_table_template),
        all_possible_test(object_under_table_template),
    )

    for idx, situation_with_object in enumerate(situations_with_object_to_match):
        perception = GAILA_PHASE_1_PERCEPTION_GENERATOR.generate_perception(
            situation_with_object, chooser=RandomChooser.for_seed(0))
        perception_graph = PerceptionGraph.from_frame(perception.frames[0])
        # perception_graph.render_to_file(f"object_to_match {idx}", out_dir / f"object_to_match-{idx}.pdf")
        # object_to_match_pattern.render_to_file(f"object_to_match pattern", out_dir / "object_to_match_pattern.pdf")
        matcher = object_to_match_pattern.matcher(perception_graph,
                                                  match_mode=MatchMode.OBJECT)
        # debug_matching = matcher.debug_matching(
        #    use_lookahead_pruning=False, render_match_to=Path("/Users/gabbard/tmp")
        # )
        result = any(matcher.matches(use_lookahead_pruning=False))
        if not result:
            return False

    # Now let's create the same situations, but substitute a negative_object for an object_to_match.
    negative_object = object_variable(
        debug_handle=negative_object_ontology_node.handle,
        root_node=negative_object_ontology_node,
        added_properties=[color],
    )
    negative_object_on_table_template = Phase1SituationTemplate(
        "negative_object-on-table",
        salient_object_variables=[negative_object, table],
        asserted_always_relations=[
            bigger_than(table, negative_object),
            on(negative_object, table),
        ],
    )

    negative_object_under_table_template = Phase1SituationTemplate(
        "negative_object-under-table",
        salient_object_variables=[negative_object, table],
        asserted_always_relations=[
            bigger_than(table, negative_object),
            above(table, negative_object),
        ],
    )

    situations_with_negative_object = chain(
        all_possible_test(negative_object_on_table_template),
        all_possible_test(negative_object_under_table_template),
    )

    # The pattern should now fail to match.
    for situation_with_negative_object in situations_with_negative_object:
        perception = GAILA_PHASE_1_PERCEPTION_GENERATOR.generate_perception(
            situation_with_negative_object, chooser=RandomChooser.for_seed(0))
        perception_graph = PerceptionGraph.from_frame(perception.frames[0])
        if any(
                object_to_match_pattern.matcher(
                    perception_graph, match_mode=MatchMode.OBJECT).matches(
                        use_lookahead_pruning=True)):
            return False
    return True
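# Hypothetical invocation sketch (not from the original source): exercise the test
# with a ball as the positive object and a box as the negative one. The
# structural_schemata lookup is an assumption about the surrounding ontology API.
def _sketch_run_ball_on_table_test():
    ball_schema = only(GAILA_PHASE_1_ONTOLOGY.structural_schemata(BALL))
    return do_object_on_table_test(
        BALL, ball_schema, negative_object_ontology_node=BOX
    )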
Example #16
def background_relations_builder(
    background_objects: Iterable[TemplateObjectVariable],
    num_relations: int,
    *,
    target: Optional[TemplateObjectVariable] = None,
    target_2: Optional[TemplateObjectVariable] = None,
    add_noise: bool = True,
    include_targets_in_noise: bool = False,
    chooser: RandomChooser = RandomChooser.for_seed(0),
) -> Iterable[Relation[Any]]:
    if add_noise:
        potential_objects = list(background_objects)
        if target and include_targets_in_noise:
            potential_objects.append(target)
        if target_2 and include_targets_in_noise:
            potential_objects.append(target_2)

        if len(potential_objects) < 2:
            return immutableset()

        relations = []
        for _ in range(num_relations):
            choice = chooser.choice(NOISE_RELATION_DSL_OPTIONS)
            if choice == "on":
                relations.append(
                    on(
                        chooser.choice(potential_objects),
                        chooser.choice(potential_objects),
                    ))
            elif choice == "beside":
                obj_choice_2 = chooser.choice(potential_objects)
                relations.append(
                    near(
                        chooser.choice(potential_objects),
                        obj_choice_2,
                        direction=Direction(
                            positive=chooser.choice(BOOL_SET),
                            relative_to_axis=HorizontalAxisOfObject(
                                obj_choice_2, index=0),
                        ),
                    ))
            elif choice == "under":
                relations.append(
                    strictly_under(
                        chooser.choice(potential_objects),
                        chooser.choice(potential_objects),
                        dist=DISTAL if chooser.choice(BOOL_SET) else PROXIMAL,
                    ))
            elif choice == "in_front":
                obj_choice_2 = chooser.choice(potential_objects)
                direction = Direction(
                    positive=chooser.choice(BOOL_SET),
                    relative_to_axis=FacingAddresseeAxis(obj_choice_2),
                )
                relations.append(
                    near(
                        chooser.choice(potential_objects),
                        obj_choice_2,
                        direction=direction,
                    ) if chooser.choice(BOOL_SET) else far(
                        chooser.choice(potential_objects),
                        obj_choice_2,
                        direction=direction,
                    ))
            else:
                raise RuntimeError(
                    "Invalid relation type in background relations")

        return flatten_relations(relations)
    else:
        return immutableset()
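# A small usage sketch (not from the original source): feed generated noise
# relations into the _on_template variant above that accepts background_relations.
# The specific object choices are illustrative.
def _sketch_on_template_with_noise():
    noise_objects = [
        standard_object("noise_box", BOX),
        standard_object("noise_ball", BALL),
    ]
    return _on_template(
        standard_object("figure_ball", BALL),
        standard_object("ground_table", TABLE),
        background=noise_objects,
        is_training=True,
        background_relations=background_relations_builder(noise_objects, 2),
    )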
Example #17
def test_subset_learner_subobject():
    mom = SituationObject.instantiate_ontology_node(
        ontology_node=MOM, ontology=GAILA_PHASE_1_ONTOLOGY)
    head = SituationObject.instantiate_ontology_node(
        ontology_node=HEAD, ontology=GAILA_PHASE_1_ONTOLOGY)
    hand = SituationObject.instantiate_ontology_node(
        ontology_node=HAND, ontology=GAILA_PHASE_1_ONTOLOGY)
    ball = SituationObject.instantiate_ontology_node(
        ontology_node=BALL, ontology=GAILA_PHASE_1_ONTOLOGY)
    house = SituationObject.instantiate_ontology_node(
        ontology_node=HOUSE, ontology=GAILA_PHASE_1_ONTOLOGY)
    ground = SituationObject.instantiate_ontology_node(
        ontology_node=GROUND, ontology=GAILA_PHASE_1_ONTOLOGY)

    mom_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY, salient_objects=immutableset([mom]))

    floating_head_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=immutableset([head]),
        other_objects=immutableset([ground]),
        always_relations=flatten_relations(negate(on(head, ground))),
    )

    # Need to include some extra situations so that the learner will prune its semantics for 'a'
    # away and not recognize it as an object.
    floating_hand_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=immutableset([hand]),
        other_objects=immutableset([ground]),
        always_relations=flatten_relations(negate(on(hand, ground))),
    )

    floating_ball_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=immutableset([ball]),
        other_objects=immutableset([ground]),
        always_relations=flatten_relations(negate(on(ball, ground))),
    )

    floating_house_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=immutableset([house]),
        other_objects=immutableset([ground]),
        always_relations=flatten_relations(negate(on(house, ground))),
    )

    object_learner = SubsetObjectLearnerNew(ontology=GAILA_PHASE_1_ONTOLOGY,
                                            beam_size=5,
                                            language_mode=LanguageMode.ENGLISH)

    for situation in [
            mom_situation,
            floating_head_situation,
            floating_hand_situation,
            floating_ball_situation,
            floating_house_situation,
    ]:
        perceptual_representation = GAILA_PHASE_1_PERCEPTION_GENERATOR.generate_perception(
            situation, chooser=RandomChooser.for_seed(0))
        for linguistic_description in GAILA_PHASE_1_LANGUAGE_GENERATOR.generate_language(
                situation, chooser=RandomChooser.for_seed(0)):
            perception_graph = PerceptionGraph.from_frame(
                perceptual_representation.frames[0])

            object_learner.learn_from(
                LanguagePerceptionSemanticAlignment(
                    language_concept_alignment=LanguageConceptAlignment.create_unaligned(
                        language=linguistic_description
                    ),
                    perception_semantic_alignment=PerceptionSemanticAlignment(
                        perception_graph=perception_graph, semantic_nodes=[]
                    ),
                )
            )

    mom_perceptual_representation = GAILA_PHASE_1_PERCEPTION_GENERATOR.generate_perception(
        mom_situation, chooser=RandomChooser.for_seed(0))
    perception_graph = PerceptionGraph.from_frame(
        mom_perceptual_representation.frames[0])
    enriched = object_learner.enrich_during_description(
        PerceptionSemanticAlignment.create_unaligned(perception_graph))

    semantic_node_types_and_debug_strings = {
        (type(semantic_node), semantic_node.concept.debug_string)
        for semantic_node in enriched.semantic_nodes
    }
    assert (ObjectSemanticNode, "Mom") in semantic_node_types_and_debug_strings
    assert (ObjectSemanticNode, "head") in semantic_node_types_and_debug_strings
    assert (ObjectSemanticNode, "hand") in semantic_node_types_and_debug_strings