Example #1
def test_before_after_relations_asserted():
    ball = object_variable("ball", root_node=BALL)
    box = object_variable("box", root_node=BOX)
    ground = object_variable("ground", root_node=GROUND)

    template_action = Phase1SituationTemplate(
        "Before/After Relation",
        salient_object_variables=[ball, box],
        background_object_variables=[ground],
        actions=[
            Action(
                ROLL,
                argument_roles_to_fillers=[(AGENT, ball)],
                auxiliary_variable_bindings=[(ROLL_SURFACE_AUXILIARY, ground)],
            )
        ],
        before_action_relations=flatten_relations([on(ball, box)]),
        after_action_relations=flatten_relations([far(ball, box)]),
    )

    situation_with_relations = tuple(
        sampled(
            template_action,
            ontology=GAILA_PHASE_1_ONTOLOGY,
            chooser=RandomChooser.for_seed(0),
            max_to_sample=1,
        ))

    assert situation_with_relations[0].before_action_relations
    assert situation_with_relations[0].after_action_relations
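A sampled situation from this template can also be handed to the perception generator used by the later tests; because the template declares before- and after-action relations, the resulting perception should contain two frames whose relation sets differ. This is only a sketch, assuming the module-level _PERCEPTION_GENERATOR defined alongside those tests:

# Sketch: perceive the sampled situation; one frame before the action, one after.
perception = _PERCEPTION_GENERATOR.generate_perception(
    situation_with_relations[0], chooser=RandomChooser.for_seed(0)
)
assert len(perception.frames) == 2
assert perception.frames[0].relations != perception.frames[1].relations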
Example #2
def build_object_multiples_situations(
        ontology: Ontology,
        *,
        samples_per_object: int = 3,
        chooser: RandomChooser) -> Iterable[HighLevelSemanticsSituation]:
    for object_type in PHASE_1_CURRICULUM_OBJECTS:
        # Exclude slow objects for now
        if object_type.handle in ["bird", "dog", "truck"]:
            continue
        is_liquid = ontology.has_all_properties(object_type, [LIQUID])
        # We don't want multiples of named people (recognized particulars) or of liquids
        if not is_recognized_particular(ontology,
                                        object_type) and not is_liquid:
            for _ in range(samples_per_object):
                num_objects = chooser.choice(range(2, 4))
                yield HighLevelSemanticsSituation(
                    ontology=GAILA_PHASE_1_ONTOLOGY,
                    salient_objects=[
                        SituationObject.instantiate_ontology_node(
                            ontology_node=object_type,
                            debug_handle=object_type.handle + f"_{idx}",
                            ontology=GAILA_PHASE_1_ONTOLOGY,
                        ) for idx in range(num_objects)
                    ],
                    axis_info=AxesInfo(),
                )
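A brief usage sketch for the generator above, using only names that already appear in this section: it materializes the yielded situations with a fixed-seed chooser and checks the object counts implied by chooser.choice(range(2, 4)).

# Sketch: enumerate multiple-object situations deterministically.
multiple_object_situations = list(
    build_object_multiples_situations(
        GAILA_PHASE_1_ONTOLOGY,
        samples_per_object=2,
        chooser=RandomChooser.for_seed(0),
    )
)
assert multiple_object_situations
# Each situation holds two or three copies of a single object type.
assert all(
    2 <= len(situation.salient_objects) <= 3
    for situation in multiple_object_situations
)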
def test_liquid_perceivable():
    juice_perception = _PERCEPTION_GENERATOR.generate_perception(
        HighLevelSemanticsSituation(ontology=GAILA_PHASE_1_ONTOLOGY,
                                    salient_objects=[situation_object(JUICE)]),
        chooser=RandomChooser.for_seed(0),
    ).frames[0]
    assert _some_object_has_binary_property(juice_perception, LIQUID)
def test_gaze_specified():
    cookie = situation_object(COOKIE)
    table = situation_object(TABLE)
    dad = situation_object(DAD)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[cookie, table, dad],
        actions=[
            Action(EAT,
                   argument_roles_to_fillers=[(AGENT, dad), (PATIENT, cookie)])
        ],
        gazed_objects=[cookie],
    )
    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0))
    frame = perception.frames[0]
    cookie_perception = perception_with_handle(frame, "**cookie_0")
    dad_perception = perception_with_handle(frame, "**Dad_0")
    table_perception = perception_with_handle(frame, "**table_0")
    # Only the cookie is gazed at, because the situation explicitly listed it in gazed_objects.
    assert HasBinaryProperty(cookie_perception,
                             GAZED_AT) in frame.property_assertions
    assert HasBinaryProperty(dad_perception,
                             GAZED_AT) not in frame.property_assertions
    assert HasBinaryProperty(table_perception,
                             GAZED_AT) not in frame.property_assertions
def test_objects_in_something_not_implicitly_grounded():
    box = situation_object(ontology_node=BOX)
    ball = situation_object(ontology_node=BALL)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[box, ball],
        always_relations=[
            Relation(
                IN_REGION,
                ball,
                Region(
                    box,
                    distance=PROXIMAL,
                    direction=Direction(
                        positive=True,
                        relative_to_axis=HorizontalAxisOfObject(box, index=0),
                    ),
                ),
            )
        ],
    )

    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )
    first_frame = perception.frames[0]
    ball_perception = perception_with_handle(first_frame, "**ball_0")
    ground_perception = perception_with_handle(first_frame, "the ground")
    box_perception = perception_with_handle(first_frame, "**box_0")

    first_frame_relations = first_frame.relations

    assert on(ball_perception, ground_perception)[0] in first_frame_relations
    assert on(box_perception, ground_perception)[0] in first_frame_relations
def test_dynamic_prepositions_implicit_grounding():
    truck = situation_object(ontology_node=TRUCK)
    baby = situation_object(ontology_node=BABY)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[baby, truck],
        actions=[Action(FALL, argument_roles_to_fillers=[(THEME, baby)])],
        after_action_relations=[on(baby, truck)],
    )

    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )
    first_frame = perception.frames[0]
    baby_perception = perception_with_handle(first_frame, "**baby_0")
    ground_perception = perception_with_handle(first_frame, "the ground")
    truck_perception = perception_with_handle(first_frame, "**truck_0")

    first_frame_relations = first_frame.relations
    second_frame_relations = perception.frames[1].relations

    assert on(baby_perception, truck_perception)[0] not in first_frame_relations
    assert on(baby_perception, truck_perception)[0] in second_frame_relations
    assert on(baby_perception, ground_perception)[0] not in second_frame_relations
    assert on(truck_perception, ground_perception)[0] in first_frame_relations
    assert on(truck_perception, ground_perception)[0] in second_frame_relations
Example #7
def test_recognizes_ontology_objects(object_type, language_mode):
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[
            SituationObject.instantiate_ontology_node(
                ontology_node=object_type, ontology=GAILA_PHASE_1_ONTOLOGY)
        ],
    )
    perception_generator = HighLevelSemanticsSituationToDevelopmentalPrimitivePerceptionGenerator(
        GAILA_PHASE_1_ONTOLOGY)
    perception = perception_generator.generate_perception(
        situation, chooser=RandomChooser.for_seed(0), include_ground=False)
    learner = IntegratedTemplateLearner(
        object_learner=LANGUAGE_MODE_TO_TEMPLATE_LEARNER_OBJECT_RECOGNIZER[
            language_mode])
    descriptions = learner.describe(perception)
    assert descriptions
    if language_mode == LanguageMode.ENGLISH:
        assert object_type.handle in one(
            descriptions.items())[0].as_token_sequence()
    else:
        mappings = (
            GAILA_PHASE_1_CHINESE_LEXICON._ontology_node_to_word  # pylint:disable=protected-access
        )
        for k, v in mappings.items():
            if k.handle == object_type.handle:
                assert v.base_form in one(
                    descriptions.items())[0].as_token_sequence()
def test_object_not_on_ground():
    """
    Intended to test that one can specify an object is not on the ground
    """
    table = situation_object(TABLE)
    ground = situation_object(GROUND)
    perception = _PERCEPTION_GENERATOR.generate_perception(
        HighLevelSemanticsSituation(
            ontology=GAILA_PHASE_1_ONTOLOGY,
            salient_objects=immutableset([table]),
            other_objects=immutableset([ground]),
            always_relations=flatten_relations(negate(on(table, ground))),
        ),
        chooser=RandomChooser.for_seed(0),
    )
    frame = perception.frames[0]
    relations = frame.relations
    table_perception = perception_with_handle(frame, "**table_0")
    ground_perception = perception_with_handle(frame, "the ground")
    assert not any(
        relation.relation_type == IN_REGION
        and relation.first_slot == table_perception
        and isinstance(relation.second_slot, Region)
        and relation.second_slot.reference_object == ground_perception
        and relation.second_slot.distance == EXTERIOR_BUT_IN_CONTACT
        for relation in relations
    )
def test_person_and_ball_color():
    person = situation_object(PERSON)
    ball = situation_object(BALL, properties=[RED])

    person_and_ball_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY, salient_objects=[person, ball]
    )

    person_and_ball_perception = _PERCEPTION_GENERATOR.generate_perception(
        person_and_ball_situation, chooser=RandomChooser.for_seed(0)
    )

    assert len(person_and_ball_perception.frames) == 1
    frame = person_and_ball_perception.frames[0]

    assert all(
        isinstance(prop_assertion, PropertyPerception)
        for prop_assertion in frame.property_assertions
    )

    person_perception = perception_with_handle(frame, "**person_0")
    ball_perception = perception_with_handle(frame, "**ball_0")
    assert HasBinaryProperty(person_perception, ANIMATE) in frame.property_assertions
    assert HasBinaryProperty(person_perception, SELF_MOVING) in frame.property_assertions
    assert HasBinaryProperty(ball_perception, INANIMATE) in frame.property_assertions
    assert any(
        isinstance(property_, HasColor) and property_.perceived_object == ball_perception
        for property_ in frame.property_assertions
    )
Example #10
def test_trivial_dynamic_situation_with_schemaless_object(language_mode):
    dad_situation_object = SituationObject.instantiate_ontology_node(
        ontology_node=DAD, ontology=GAILA_PHASE_1_ONTOLOGY)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[dad_situation_object])
    perception_generator = HighLevelSemanticsSituationToDevelopmentalPrimitivePerceptionGenerator(
        GAILA_PHASE_1_ONTOLOGY)
    # We explicitly exclude ground in perception generation

    # this generates a static perception...
    perception = perception_generator.generate_perception(
        situation, chooser=RandomChooser.for_seed(0), include_ground=False)

    # so we need to construct a dynamic one by hand from two identical scenes
    dynamic_perception = PerceptualRepresentation(
        frames=[perception.frames[0], perception.frames[0]])

    perception_graph = PerceptionGraph.from_dynamic_perceptual_representation(
        dynamic_perception)
    perception_semantic_alignment = PerceptionSemanticAlignment.create_unaligned(
        perception_graph)
    (_, description_to_matched_semantic_node
     ) = LANGUAGE_MODE_TO_OBJECT_RECOGNIZER[language_mode].match_objects(
         perception_semantic_alignment)
    assert len(description_to_matched_semantic_node) == 1
    assert (language_mode == LanguageMode.ENGLISH and
            ("Dad", ) in description_to_matched_semantic_node) or (
                language_mode == LanguageMode.CHINESE and
                ("ba4 ba4", ) in description_to_matched_semantic_node)
Example #11
def test_two_objects():
    two_object_template = Phase1SituationTemplate(
        "two-objects",
        salient_object_variables=[
            object_variable("person", root_node=_PERSON),
            object_variable("toy_vehicle", required_properties=[_TOY_VEHICLE]),
        ],
    )

    reference_object_sets = {
        immutableset(["mom", "toy_truck"]),
        immutableset(["dad", "toy_truck"]),
        immutableset(["learner", "toy_truck"]),
        immutableset(["mom", "toy_car"]),
        immutableset(["dad", "toy_car"]),
        immutableset(["learner", "toy_car"]),
    }

    generated_object_sets = set(
        immutableset(situation_object.ontology_node.handle
                     for situation_object in situation.salient_objects)
        for situation in all_possible(
            two_object_template,
            ontology=_TESTING_ONTOLOGY,
            chooser=RandomChooser.for_seed(0),
            default_addressee_node=_LEARNER,
        ))

    assert generated_object_sets == reference_object_sets
def test_colors_across_part_of_relations():
    """
    Intended to test color inheritance across part-of relations
    with objects that have a prototypical color
    """
    learner_perception = _PERCEPTION_GENERATOR.generate_perception(
        HighLevelSemanticsSituation(
            ontology=GAILA_PHASE_1_ONTOLOGY, salient_objects=[situation_object(TRUCK)]
        ),
        chooser=RandomChooser.for_seed(0),
    )
    frame = learner_perception.frames[0]
    property_assertions = frame.property_assertions

    truck_perception = perception_with_handle(frame, "**truck_0")
    flatbed_perception = perception_with_handle(frame, "**flatbed_0")
    tire0_perception = perception_with_handle(frame, "**tire_0")
    tire1_perception = perception_with_handle(frame, "**tire_1")
    tire2_perception = perception_with_handle(frame, "**tire_2")
    tire3_perception = perception_with_handle(frame, "**tire_3")
    tire4_perception = perception_with_handle(frame, "**tire_4")
    tire5_perception = perception_with_handle(frame, "**tire_5")
    tire6_perception = perception_with_handle(frame, "**tire_6")
    tire7_perception = perception_with_handle(frame, "**tire_7")
    blue_options = COLORS_TO_RGBS[BLUE]
    blue_perceptions = [RgbColorPerception(r, g, b) for r, g, b in blue_options]
    red_options = COLORS_TO_RGBS[RED]
    red_perceptions = [RgbColorPerception(r, g, b) for r, g, b in red_options]
    black_options = COLORS_TO_RGBS[BLACK]
    black_perceptions = [RgbColorPerception(r, g, b) for r, g, b in black_options]

    assert any(
        HasColor(truck_perception, blue_perception) in property_assertions
        for blue_perception in blue_perceptions
    ) or any(
        HasColor(truck_perception, red_perception) in property_assertions
        for red_perception in red_perceptions
    )
    assert any(
        HasColor(flatbed_perception, blue_perception) in property_assertions
        for blue_perception in blue_perceptions
    ) or any(
        HasColor(flatbed_perception, red_perception) in property_assertions
        for red_perception in red_perceptions
    )
    assert all(
        any(
            HasColor(tire_perception, black_perception) in property_assertions
            for black_perception in black_perceptions
        )
        for tire_perception in (
            tire0_perception,
            tire1_perception,
            tire2_perception,
            tire3_perception,
            tire4_perception,
            tire5_perception,
            tire6_perception,
            tire7_perception,
        )
    )
def test_perceive_explicit_relations():
    # we want to test that relations explicitly called out in the situation are perceived.
    # Such relations fall into three buckets:
    # those which hold before an action,
    # those which hold after an action,
    # and those which hold both before and after an action.
    # To test all three of these at once, we use a situation where
    # (a) Mom is putting a ball on a table
    # (b) before the action the ball is far from a box,
    # (c) but after the action the ball is near the box,
    # (d) throughout the action the box is on the table.
    mom = situation_object(MOM)
    ball = situation_object(BALL)
    box = situation_object(BOX)
    table = situation_object(TABLE)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[mom, box, ball, table],
        always_relations=[on(box, table)],
        before_action_relations=[far(ball, box)],
        after_action_relations=[near(ball, box)],
        actions=[
            Action(
                PUT,
                argument_roles_to_fillers=[
                    (AGENT, mom),
                    (THEME, ball),
                    (
                        GOAL,
                        Region(
                            reference_object=table,
                            distance=EXTERIOR_BUT_IN_CONTACT,
                            direction=GRAVITATIONAL_UP,
                        ),
                    ),
                ],
            )
        ],
    )
    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )

    ball_perception = perception_with_handle(perception.frames[0], "**ball_0")
    box_perception = perception_with_handle(perception.frames[0], "**box_0")
    table_perception = perception_with_handle(perception.frames[0], "**table_0")

    assert only(on(box_perception, table_perception)) in perception.frames[0].relations
    assert only(on(box_perception, table_perception)) in perception.frames[1].relations

    assert only(far(ball_perception, box_perception)) in perception.frames[0].relations
    assert (
        only(far(ball_perception, box_perception)) not in perception.frames[1].relations
    )

    assert (
        only(near(ball_perception, box_perception)) not in perception.frames[0].relations
    )
    assert only(near(ball_perception, box_perception)) in perception.frames[1].relations
def test_liquid_in_and_out_of_container():
    juice = situation_object(JUICE)
    box = situation_object(ontology_node=BOX)
    table = situation_object(ontology_node=TABLE)
    two_d_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[juice, table],
        always_relations=[on(juice, table)],
    )
    two_d_perception = _PERCEPTION_GENERATOR.generate_perception(
        two_d_situation, chooser=RandomChooser.for_seed(0)
    )

    three_d_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[juice, box],
        always_relations=[
            Relation(IN_REGION, juice, Region(box, distance=INTERIOR, direction=None))
        ],
    )
    three_d_perception = _PERCEPTION_GENERATOR.generate_perception(
        three_d_situation, chooser=RandomChooser.for_seed(0)
    )

    two_perceived_objects = two_d_perception.frames[0].perceived_objects
    two_object_handles = set(obj.debug_handle for obj in two_perceived_objects)
    assert all(handle in two_object_handles for handle in {"**juice_0", "**table_0"})
    three_perceived_objects = three_d_perception.frames[0].perceived_objects
    three_object_handles = set(obj.debug_handle for obj in three_perceived_objects)
    assert all(handle in three_object_handles for handle in {"**juice_0", "**box_0"})

    assert any(
        isinstance(p, HasBinaryProperty)
        and perception_with_handle(two_d_perception.frames[0], "**juice_0")
        == p.perceived_object
        and p.binary_property == TWO_DIMENSIONAL
        for p in two_d_perception.frames[0].property_assertions
    )
    assert not any(
        isinstance(p, HasBinaryProperty)
        and perception_with_handle(three_d_perception.frames[0], "**juice_0")
        == p.perceived_object
        and p.binary_property == TWO_DIMENSIONAL
        for p in three_d_perception.frames[0].property_assertions
    )
Example #15
def all_possible_test(
    template: Phase1SituationTemplate
) -> Iterable[HighLevelSemanticsSituation]:
    """
    Shortcut for `all_possible` with the GAILA phase 1 ontology and a `RandomChooser` with seed 0.
    """
    return all_possible(template,
                        chooser=RandomChooser.for_seed(0),
                        ontology=GAILA_PHASE_1_ONTOLOGY)
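As a usage sketch for this shortcut, built only from template helpers shown elsewhere in this section, a minimal one-object template can be enumerated directly:

# Sketch: enumerate every concrete situation for a single-ball template.
ball_template = Phase1SituationTemplate(
    "single-ball",
    salient_object_variables=[object_variable("ball", root_node=BALL)],
)
ball_situations = list(all_possible_test(ball_template))
assert ball_situations
assert all(
    any(obj.ontology_node.handle == "ball" for obj in situation.salient_objects)
    for situation in ball_situations
)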
Example #16
def test_simple_experiment():
    language_generator = SingleObjectLanguageGenerator(
        GAILA_PHASE_1_ENGLISH_LEXICON)
    perception_generator = DummyVisualPerceptionGenerator()

    only_show_ball = GeneratedFromSituationsInstanceGroup(
        name="only-ball",
        situations=[
            LocatedObjectSituation([(
                SituationObject.instantiate_ontology_node(
                    BALL, ontology=GAILA_PHASE_1_ONTOLOGY),
                Point(0.0, 0.0, 0.0),
            )])
        ],
        language_generator=language_generator,
        perception_generator=perception_generator,
        chooser=RandomChooser.for_seed(0),
    )

    pre_acc = CandidateAccuracyObserver("pre")
    post_acc = CandidateAccuracyObserver("post")
    test_acc = CandidateAccuracyObserver("test")

    experiment = Experiment(
        name="simple",
        training_stages=[only_show_ball],
        learner_factory=MemorizingLanguageLearner,
        pre_example_training_observers=[
            TopChoiceExactMatchObserver("pre"), pre_acc
        ],
        post_example_training_observers=[
            TopChoiceExactMatchObserver("post"), post_acc
        ],
        warm_up_test_instance_groups=[only_show_ball],
        test_instance_groups=[only_show_ball],
        test_observers=[TopChoiceExactMatchObserver("test"), test_acc],
        sequence_chooser=RandomChooser.for_seed(0),
    )

    execute_experiment(experiment)

    assert pre_acc.accuracy() == 0.0
    assert post_acc.accuracy() == 1.0
    assert test_acc.accuracy() == 1.0
def test_person_and_ball():
    person = situation_object(PERSON)
    ball = situation_object(BALL)

    person_and_ball_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY, salient_objects=[person, ball]
    )

    person_and_ball_perception = _PERCEPTION_GENERATOR.generate_perception(
        person_and_ball_situation, chooser=RandomChooser.for_seed(0)
    )

    perceived_objects = person_and_ball_perception.frames[0].perceived_objects
    object_handles = set(obj.debug_handle for obj in perceived_objects)
    assert len(person_and_ball_perception.frames) == 1
    assert object_handles == {
        "**ball_0",
        "**person_0",
        "**head_0",
        "**torso_0",
        "**arm_0",
        "**armsegment_0",
        "**armsegment_1",
        "**hand_0",
        "**arm_1",
        "**armsegment_2",
        "**armsegment_3",
        "**hand_1",
        "**(animal) leg_0",
        "**leg-segment_0",
        "**leg-segment_1",
        "**foot_0",
        "**(animal) leg_1",
        "**leg-segment_2",
        "**leg-segment_3",
        "**foot_1",
        "the ground",
    }

    assert person_and_ball_perception.frames[0].relations

    person_perception = perception_with_handle(
        person_and_ball_perception.frames[0], "**person_0"
    )
    ball_perception = perception_with_handle(
        person_and_ball_perception.frames[0], "**ball_0"
    )

    assert set(person_and_ball_perception.frames[0].property_assertions).issuperset(
        {
            HasBinaryProperty(person_perception, ANIMATE),
            HasBinaryProperty(person_perception, SELF_MOVING),
            HasBinaryProperty(ball_perception, INANIMATE),
        }
    )
Example #18
def test_choose_first():
    generator = ChooseFirstLanguageGenerator(DummyLanguageGenerator())
    # pycharm fails to recognize converter
    # noinspection PyTypeChecker
    situation = LocatedObjectSituation([(situation_object(BALL),
                                         Point(0, 0, 0))])

    generated_descriptions = generator.generate_language(
        situation, RandomChooser.for_seed(0))
    assert len(generated_descriptions) == 1
    assert generated_descriptions[0].as_token_sequence() == ("hello", "world")
def test_speaker_perceivable():
    speaker_situation_perception = _PERCEPTION_GENERATOR.generate_perception(
        HighLevelSemanticsSituation(
            ontology=GAILA_PHASE_1_ONTOLOGY,
            salient_objects=[situation_object(PERSON, properties=[IS_SPEAKER])],
        ),
        chooser=RandomChooser.for_seed(0),
    )
    assert _some_object_has_binary_property(
        speaker_situation_perception.frames[0], IS_SPEAKER
    )
def test_not_two_speakers():
    with pytest.raises(RuntimeError):
        _PERCEPTION_GENERATOR.generate_perception(
            HighLevelSemanticsSituation(
                ontology=GAILA_PHASE_1_ONTOLOGY,
                salient_objects=[
                    situation_object(PERSON, properties=[IS_SPEAKER]),
                    situation_object(PERSON, properties=[IS_SPEAKER]),
                ],
            ),
            chooser=RandomChooser.for_seed(0),
        )
def test_implicit_ground():
    table_perception = _PERCEPTION_GENERATOR.generate_perception(
        HighLevelSemanticsSituation(ontology=GAILA_PHASE_1_ONTOLOGY,
                                    salient_objects=[situation_object(TABLE)]),
        chooser=RandomChooser.for_seed(0),
    )

    perceived_objects = table_perception.frames[0].perceived_objects
    object_handles = set(obj.debug_handle for obj in perceived_objects)

    # assert that a "ground" object is perceived
    assert "the ground" in object_handles
Example #22
def test_single_object_generator():
    # warning due to PyCharm bug
    # noinspection PyTypeChecker
    situation = LocatedObjectSituation(
        objects_to_locations=((situation_object(BALL), Point(0, 0, 0)), ))

    single_obj_generator = SingleObjectLanguageGenerator(
        GAILA_PHASE_1_ENGLISH_LEXICON)

    languages_for_situation = single_obj_generator.generate_language(
        situation, RandomChooser.for_seed(0))
    assert len(languages_for_situation) == 1
    assert languages_for_situation[0].as_token_sequence() == ("ball", )
def test_explicit_ground():
    ground_perception = _PERCEPTION_GENERATOR.generate_perception(
        HighLevelSemanticsSituation(
            ontology=GAILA_PHASE_1_ONTOLOGY, salient_objects=[situation_object(GROUND)]
        ),
        chooser=RandomChooser.for_seed(0),
    )

    perceived_objects = ground_perception.frames[0].perceived_objects
    object_handles = set(obj.debug_handle for obj in perceived_objects)

    # assert that a second "ground" object was not generated
    assert object_handles == {"the ground"}
def test_perceive_relations_during():
    learner_perception = _PERCEPTION_GENERATOR.generate_perception(
        make_bird_flies_over_a_house(), chooser=RandomChooser.for_seed(0)
    )
    assert learner_perception.during

    bird = perception_with_handle(learner_perception.frames[0], "**bird_0")
    house = perception_with_handle(learner_perception.frames[0], "**house_0")

    bird_over_the_house = Relation(
        IN_REGION,
        bird,
        Region(reference_object=house, distance=DISTAL, direction=GRAVITATIONAL_UP),
    )

    assert bird_over_the_house in learner_perception.during.at_some_point
Example #25
def log_experiment_entry_point(params: Parameters) -> None:
    experiment_name = params.string("experiment")
    debug_log_dir = params.optional_creatable_directory("debug_log_directory")

    graph_logger: Optional[HypothesisLogger]
    if debug_log_dir:
        logging.info("Debug graphs will be written to %s", debug_log_dir)
        graph_logger = HypothesisLogger(debug_log_dir,
                                        enable_graph_rendering=True)
    else:
        graph_logger = None

    logger = LearningProgressHtmlLogger.create_logger(params)

    language_mode = params.enum("language_mode",
                                LanguageMode,
                                default=LanguageMode.ENGLISH)

    (training_instance_groups,
     test_instance_groups) = curriculum_from_params(params, language_mode)

    execute_experiment(
        Experiment(
            name=experiment_name,
            training_stages=training_instance_groups,
            learner_factory=learner_factory_from_params(
                params, graph_logger, language_mode),
            pre_example_training_observers=[
                logger.pre_observer(),
                CandidateAccuracyObserver("pre-acc-observer"),
            ],
            post_example_training_observers=[logger.post_observer()],
            test_instance_groups=test_instance_groups,
            test_observers=[logger.test_observer()],
            sequence_chooser=RandomChooser.for_seed(0),
        ),
        log_path=params.optional_creatable_directory("hypothesis_log_dir"),
        log_hypotheses_every_n_examples=params.integer(
            "log_hypothesis_every_n_steps", default=250),
        log_learner_state=params.boolean("log_learner_state", default=True),
        learner_logging_path=params.optional_creatable_directory(
            "experiment_group_dir"),
        starting_point=params.integer("starting_point", default=-1),
        point_to_log=params.integer("point_to_log", default=0),
        load_learner_state=params.optional_existing_file("learner_state_path"),
    )
def test_path_from_action_description():
    ball = situation_object(BALL)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[ball],
        actions=[Action(FALL, argument_roles_to_fillers=[(THEME, ball)])],
    )
    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )
    ball_perception = perception_with_handle(perception.frames[0], "**ball_0")
    ground_perception = perception_with_handle(perception.frames[0], "the ground")
    assert perception.during
    assert perception.during.objects_to_paths
    assert len(perception.during.objects_to_paths) == 1
    path = only(perception.during.objects_to_paths[ball_perception])
    assert path.reference_object == ground_perception
    assert path.operator == TOWARD
def test_relations_between_objects_and_ground():
    # person_put_ball_on_table
    person = situation_object(ontology_node=PERSON)
    ball = situation_object(ontology_node=BALL)
    table = situation_object(ontology_node=TABLE)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[person, ball, table],
        actions=[
            # What is the best way of representing the destination in the high-level semantics?
            # Here we represent it as indicating a relation which should be true.
            Action(
                PUT,
                (
                    (AGENT, person),
                    (THEME, ball),
                    (
                        GOAL,
                        Region(
                            reference_object=table,
                            distance=EXTERIOR_BUT_IN_CONTACT,
                            direction=GRAVITATIONAL_UP,
                        ),
                    ),
                ),
            )
        ],
    )

    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )
    first_frame = perception.frames[0]
    ball_perception = perception_with_handle(first_frame, "**ball_0")
    ground_perception = perception_with_handle(first_frame, "the ground")
    table_perception = perception_with_handle(first_frame, "**table_0")

    first_frame_relations = first_frame.relations
    second_frame_relations = perception.frames[1].relations

    assert on(ball_perception, ground_perception)[0] in first_frame_relations
    assert on(ball_perception, ground_perception)[0] not in second_frame_relations
    assert on(table_perception, ground_perception)[0] in first_frame_relations
    assert on(table_perception, ground_perception)[0] in second_frame_relations
Example #28
    def from_params(
        params: Parameters,
        *,
        ontology: Optional[Ontology] = None,
        graph_logger: Optional[GraphLogger] = None,
    ) -> "ProposeButVerifyObjectLearner":
        rng = RandomChooser.for_seed(
            params.optional_integer("random_seed", default=0))

        return ProposeButVerifyObjectLearner(
            graph_match_confirmation_threshold=params.floating_point(
                "graph_match_confirmation_threshold", default=0.8),
            graph_logger=graph_logger,
            rng=rng,
            ontology=ontology if ontology else GAILA_PHASE_1_ONTOLOGY,
            language_mode=params.enum("language_mode",
                                      LanguageMode,
                                      default=LanguageMode.ENGLISH),
        )
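A configuration sketch for the factory above. It assumes Parameters.from_mapping (vistautils) is available for building an in-memory parameter set; the keys mirror the ones from_params reads, and language_mode falls back to its ENGLISH default.

# Sketch only: construct a ProposeButVerifyObjectLearner from explicit parameters.
# Parameters.from_mapping is an assumed helper (vistautils), not shown in this section.
pbv_params = Parameters.from_mapping(
    {
        "random_seed": 42,
        "graph_match_confirmation_threshold": 0.9,
    }
)
pbv_learner = ProposeButVerifyObjectLearner.from_params(pbv_params)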
def test_subobject_perception_has_appropriate_properties():
    """
    Intended to test that an object's subobjects have their properties assigned to them properly
    """
    learner_perception = _PERCEPTION_GENERATOR.generate_perception(
        HighLevelSemanticsSituation(ontology=GAILA_PHASE_1_ONTOLOGY,
                                    salient_objects=[situation_object(TABLE)]),
        chooser=RandomChooser.for_seed(0),
    )
    frame = learner_perception.frames[0]
    property_assertions = frame.property_assertions

    leg_perceptions = [
        perception_with_handle(frame, "**(furniture) leg_0"),
        perception_with_handle(frame, "**(furniture) leg_1"),
        perception_with_handle(frame, "**(furniture) leg_2"),
        perception_with_handle(frame, "**(furniture) leg_3"),
    ]
    assert all(
        HasBinaryProperty(leg_perception, INANIMATE) in property_assertions
        for leg_perception in leg_perceptions)
def test_grounding_of_unsupported_objects():
    box = situation_object(ontology_node=BOX)
    ball = situation_object(ontology_node=BALL)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[box, ball],
        always_relations=[Relation(IN_REGION, ball, Region(box, distance=INTERIOR))],
    )

    perception = _PERCEPTION_GENERATOR.generate_perception(
        situation, chooser=RandomChooser.for_seed(0)
    )
    first_frame = perception.frames[0]
    ball_perception = perception_with_handle(first_frame, "**ball_0")
    ground_perception = perception_with_handle(first_frame, "the ground")
    box_perception = perception_with_handle(first_frame, "**box_0")

    first_frame_relations = first_frame.relations

    assert on(ball_perception, ground_perception)[0] not in first_frame_relations
    assert on(box_perception, ground_perception)[0] in first_frame_relations