Example #1
    def _instantiate_object(
        self,
        template_object: SituationTemplateObject,
        template: SimpleSituationTemplate,
        chooser: SequenceChooser,
    ) -> SituationObject:
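        # Look up the ontology supertype and the properties this template object must have.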
        object_supertype = template.objects_to_ontology_types[template_object]
        required_properties = template.objects_to_required_properties[
            template_object]

        compatible_ontology_types = self._ontology.nodes_with_properties(
            root_node=object_supertype,
            required_properties=required_properties)

        if compatible_ontology_types:
            ontology_node = chooser.choice(compatible_ontology_types)
            return SituationObject.instantiate_ontology_node(
                ontology_node,
                properties=self._ontology.properties_for_node(ontology_node),
                debug_handle=template_object.handle,
                ontology=GAILA_PHASE_1_ONTOLOGY,
            )
        else:
            raise RuntimeError(
                f"When attempting to instantiate object {template_object} in"
                f" {template}: no node at or under {object_supertype} with "
                f"properties {required_properties} exists in the ontology "
                f"{self._ontology}")
Example #2
def test_recognizes_ontology_objects(object_type, language_mode):
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[
            SituationObject.instantiate_ontology_node(
                ontology_node=object_type, ontology=GAILA_PHASE_1_ONTOLOGY)
        ],
    )
    perception_generator = HighLevelSemanticsSituationToDevelopmentalPrimitivePerceptionGenerator(
        GAILA_PHASE_1_ONTOLOGY)
    perception = perception_generator.generate_perception(
        situation, chooser=RandomChooser.for_seed(0), include_ground=False)
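    # A learner backed by the language-mode-specific object recognizer should name the object.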
    learner = IntegratedTemplateLearner(
        object_learner=LANGUAGE_MODE_TO_TEMPLATE_LEARNER_OBJECT_RECOGNIZER[
            language_mode])
    descriptions = learner.describe(perception)
    assert descriptions
    if language_mode == LanguageMode.ENGLISH:
        assert object_type.handle in one(
            descriptions.items())[0].as_token_sequence()
    else:
        mappings = (
            GAILA_PHASE_1_CHINESE_LEXICON._ontology_node_to_word  # pylint:disable=protected-access
        )
        for k, v in mappings.items():
            if k.handle == object_type.handle:
                assert v.base_form in one(
                    descriptions.items())[0].as_token_sequence()
Example #3
 def _instantiate_object(
     self,
     object_var: TemplateObjectVariable,
     variable_assignment: "TemplateVariableAssignment",
     *,
     has_addressee: bool,
     default_addressee_node: OntologyNode,
 ) -> SituationObject:
     object_type = variable_assignment.object_variables_to_fillers[
         object_var]
     asserted_properties = object_var.asserted_properties
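      # If the template asserts no addressee and this object fills the default addressee node, mark it as the addressee.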
     if object_type == default_addressee_node and not has_addressee:
         asserted_properties = immutableset(
             object_var.asserted_properties.union({IS_ADDRESSEE}))
     return SituationObject.instantiate_ontology_node(
         ontology_node=object_type,
         properties=[
             # instantiate any property variables associated with this object
             variable_assignment.
             property_variables_to_fillers[asserted_property] if isinstance(
                 asserted_property,
                 TemplatePropertyVariable) else asserted_property
             for asserted_property in asserted_properties
         ],
         ontology=self.ontology,
     )
Example #4
def test_trivial_dynamic_situation_with_schemaless_object(language_mode):
    dad_situation_object = SituationObject.instantiate_ontology_node(
        ontology_node=DAD, ontology=GAILA_PHASE_1_ONTOLOGY)
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=[dad_situation_object])
    perception_generator = HighLevelSemanticsSituationToDevelopmentalPrimitivePerceptionGenerator(
        GAILA_PHASE_1_ONTOLOGY)
    # We explicitly exclude ground in perception generation

    # this generates a static perception...
    perception = perception_generator.generate_perception(
        situation, chooser=RandomChooser.for_seed(0), include_ground=False)

    # so we need to construct a dynamic one by hand from two identical scenes
    dynamic_perception = PerceptualRepresentation(
        frames=[perception.frames[0], perception.frames[0]])

    perception_graph = PerceptionGraph.from_dynamic_perceptual_representation(
        dynamic_perception)
    perception_semantic_alignment = PerceptionSemanticAlignment.create_unaligned(
        perception_graph)
    (_, description_to_matched_semantic_node
     ) = LANGUAGE_MODE_TO_OBJECT_RECOGNIZER[language_mode].match_objects(
         perception_semantic_alignment)
    assert len(description_to_matched_semantic_node) == 1
    assert (language_mode == LanguageMode.ENGLISH and
            ("Dad", ) in description_to_matched_semantic_node) or (
                language_mode == LanguageMode.CHINESE and
                ("ba4 ba4", ) in description_to_matched_semantic_node)
Example #5
 def build_object_multiples_situations(
         ontology: Ontology,
         *,
         samples_per_object: int = 3,
         chooser: RandomChooser) -> Iterable[HighLevelSemanticsSituation]:
     for object_type in PHASE_1_CURRICULUM_OBJECTS:
         # Exclude slow objects for now
         if object_type.handle in ["bird", "dog", "truck"]:
             continue
         is_liquid = ontology.has_all_properties(object_type, [LIQUID])
         # don't want multiples of named people
         if not is_recognized_particular(ontology,
                                         object_type) and not is_liquid:
             for _ in range(samples_per_object):
                 num_objects = chooser.choice(range(2, 4))
                 yield HighLevelSemanticsSituation(
                     ontology=GAILA_PHASE_1_ONTOLOGY,
                     salient_objects=[
                         SituationObject.instantiate_ontology_node(
                             ontology_node=object_type,
                             debug_handle=object_type.handle + f"_{idx}",
                             ontology=GAILA_PHASE_1_ONTOLOGY,
                         ) for idx in range(num_objects)
                     ],
                     axis_info=AxesInfo(),
                 )
Example #6
def situation_object(
        ontology_node: OntologyNode,
        *,
        debug_handle: Optional[str] = None,
        properties: Iterable[OntologyNode] = tuple(),
) -> SituationObject:
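    # Convenience wrapper that always instantiates against GAILA_PHASE_1_ONTOLOGY.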
    return SituationObject.instantiate_ontology_node(
        ontology_node=ontology_node,
        ontology=GAILA_PHASE_1_ONTOLOGY,
        debug_handle=debug_handle,
        properties=properties,
    )
Example #7
def test_simple_experiment():
    language_generator = SingleObjectLanguageGenerator(
        GAILA_PHASE_1_ENGLISH_LEXICON)
    perception_generator = DummyVisualPerceptionGenerator()

    only_show_truck = GeneratedFromSituationsInstanceGroup(
        name="only-ball",
        situations=[
            LocatedObjectSituation([(
                SituationObject.instantiate_ontology_node(
                    BALL, ontology=GAILA_PHASE_1_ONTOLOGY),
                Point(0.0, 0.0, 0.0),
            )])
        ],
        language_generator=language_generator,
        perception_generator=perception_generator,
        chooser=RandomChooser.for_seed(0),
    )

    pre_acc = CandidateAccuracyObserver("pre")
    post_acc = CandidateAccuracyObserver("post")
    test_acc = CandidateAccuracyObserver("test")
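    # These observers track description accuracy before training, after each training example, and at test time.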

    experiment = Experiment(
        name="simple",
        training_stages=[only_show_truck],
        learner_factory=MemorizingLanguageLearner,
        pre_example_training_observers=[
            TopChoiceExactMatchObserver("pre"), pre_acc
        ],
        post_example_training_observers=[
            TopChoiceExactMatchObserver("post"), post_acc
        ],
        warm_up_test_instance_groups=[only_show_truck],
        test_instance_groups=[only_show_truck],
        test_observers=[TopChoiceExactMatchObserver("test"), test_acc],
        sequence_chooser=RandomChooser.for_seed(0),
    )

    execute_experiment(experiment)

    assert pre_acc.accuracy() == 0.0
    assert post_acc.accuracy() == 1.0
    assert test_acc.accuracy() == 1.0
Example #8
def test_with_object_recognizer(language_mode):
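    # Only the object recognizer is active; attribute, relation, and action learning are disabled.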
    integrated_learner = IntegratedTemplateLearner(
        object_learner=LANGUAGE_MODE_TO_TEMPLATE_LEARNER_OBJECT_RECOGNIZER[language_mode],
        attribute_learner=None,
        relation_learner=None,
        action_learner=None,
    )

    dad_situation_object = SituationObject.instantiate_ontology_node(
        ontology_node=DAD, ontology=GAILA_PHASE_1_ONTOLOGY
    )
    situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY, salient_objects=[dad_situation_object]
    )
    perception_generator = HighLevelSemanticsSituationToDevelopmentalPrimitivePerceptionGenerator(
        GAILA_PHASE_1_ONTOLOGY
    )
    # We explicitly exclude ground in perception generation

    # this generates a static perception...
    perception = perception_generator.generate_perception(
        situation, chooser=RandomChooser.for_seed(0), include_ground=False
    )

    # so we need to construct a dynamic one by hand from two identical scenes
    dynamic_perception = PerceptualRepresentation(
        frames=[perception.frames[0], perception.frames[0]]
    )

    descriptions = integrated_learner.describe(dynamic_perception)

    assert len(descriptions) == 1
    assert (
        language_mode == LanguageMode.ENGLISH
        and one(descriptions.keys()).as_token_sequence() == ("Dad",)
    ) or (
        language_mode == LanguageMode.CHINESE
        and one(descriptions.keys()).as_token_sequence() == ("ba4 ba4",)
    )
Example #9
    def _instantiate_objects(
        self,
        template: Phase1SituationTemplate,
        variable_assignment: "TemplateVariableAssignment",
        *,
        default_addressee_node: OntologyNode,
    ):
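        # Check whether any template object already asserts the addressee property.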
        has_addressee = any(IS_ADDRESSEE in object_.asserted_properties
                            for object_ in template.all_object_variables)

        object_var_to_instantiations_mutable: List[Tuple[
            TemplateObjectVariable, SituationObject]] = [(
                obj_var,
                self._instantiate_object(
                    obj_var,
                    variable_assignment,
                    has_addressee=has_addressee,
                    default_addressee_node=default_addressee_node,
                ),
            ) for obj_var in template.all_object_variables]
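        # If no addressee was asserted and the default addressee node was not instantiated, add it as an explicit addressee.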
        if (default_addressee_node not in immutableset(
                object_.ontology_node
                for (_, object_) in object_var_to_instantiations_mutable)
                and not has_addressee):
            object_var_to_instantiations_mutable.append((
                object_variable(default_addressee_node.handle,
                                default_addressee_node),
                SituationObject.instantiate_ontology_node(
                    default_addressee_node,
                    properties=[IS_ADDRESSEE],
                    debug_handle=default_addressee_node.handle +
                    "_default_addressee",
                    ontology=self.ontology,
                ),
            ))
        return immutabledict(object_var_to_instantiations_mutable)
Example #10
def build_object_learner_experiment_curriculum_train(
    num_samples: Optional[int],
    num_noise_objects: Optional[int],
    language_generator: LanguageGenerator[
        HighLevelSemanticsSituation, LinearizedDependencyTree
    ],
    *,
    params: Parameters = Parameters.empty(),
) -> Sequence[Phase1InstanceGroup]:
    situations = make_multiple_object_situation(
        num_samples, num_noise_objects, language_generator
    )
    accurate_language_chance = params.floating_point(
        "accurate_language_percentage", default=0.5
    )
    output_situations = []
    random.seed(params.integer("random_seed", default=0))
    rng = RandomChooser.for_seed(params.integer("language_random_seed", default=0))
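    # random decides whether each instance keeps accurate language; rng drives the wrong-object choice and language generation.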
    for (situation, language, perception) in situations.instances():
        if random.random() <= accurate_language_chance:
            output_language = language
        else:
            # Make Invalid Language
            if situation and isinstance(situation, HighLevelSemanticsSituation):
                # First, gather all OntologyNodes which aren't already present in the situation
                present_ontology_nodes = [
                    _object.ontology_node for _object in situation.all_objects
                ]
                valid_other_objects = [
                    node
                    for node in PHASE_1_CURRICULUM_OBJECTS
                    if node not in present_ontology_nodes
                ]
                # Then choose one at random
                chosen_ontology_node = rng.choice(valid_other_objects)
                # Make a fake situation with just this object in it, ignoring colors
                wrong_situation = HighLevelSemanticsSituation(
                    ontology=GAILA_PHASE_2_ONTOLOGY,
                    salient_objects=[
                        SituationObject.instantiate_ontology_node(
                            chosen_ontology_node, ontology=GAILA_PHASE_2_ONTOLOGY
                        )
                    ],
                    syntax_hints=[IGNORE_COLORS],
                )
                # Generate the language as if it came from this fake situation rather than the original one
                fake_language = only(
                    language_generator.generate_language(wrong_situation, chooser=rng)
                )
                output_language = LinearizedDependencyTree(
                    dependency_tree=fake_language.dependency_tree,
                    surface_token_order=fake_language.surface_token_order,
                    accurate=False,
                )

            else:
                raise RuntimeError(
                    f"Unable to make invalid language without a situation of type HighlevelSemanticsSituation. Got situation: {situation}"
                )

        output_situations.append((situation, output_language, perception))
    return [
        AblatedLanguageSituationsInstanceGroup(
            name=f"{situations.name()}_ablated", instances=output_situations
        )
    ]
Example #11
def run_experiment(learner, curricula, experiment_id):
    # Teach each pretraining curriculum
    for curriculum in curricula:
        print("Teaching", curriculum.name(), "curriculum")
        for (
                _,
                linguistic_description,
                perceptual_representation,
        ) in curriculum.instances():
            # Get the object matches first - preposition learner can't learn without already recognized objects
            # print('Observation: ',' '.join(linguistic_description.as_token_sequence()))
            learner.observe(
                LearningExample(perceptual_representation,
                                linguistic_description))

    # Teach each kind member
    empty_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_2_ONTOLOGY,
        salient_objects=immutableset([
            SituationObject.instantiate_ontology_node(
                ontology_node=GROUND,
                debug_handle=GROUND.handle,
                ontology=GAILA_PHASE_1_ONTOLOGY,
            )
        ]),
    )
    empty_perception = GAILA_PHASE_2_PERCEPTION_GENERATOR.generate_perception(
        empty_situation, PHASE1_CHOOSER_FACTORY())
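    # Novel pseudowords paired with the kind each one should be learned to belong to.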
    pseudoword_to_kind = {"wug": "animal", "vonk": "food", "snarp": "people"}

    print("Teaching new objects in known categories")
    for word, kind in pseudoword_to_kind.items():
        print("Observation: ", word, "s", "are", kind, "s")
        learner.observe(
            LearningExample(
                empty_perception,
                TokenSequenceLinguisticDescription(
                    tokens=(word, "s", "are", kind, "s")),
            ))

    semantics_manager: SemanticsManager = SemanticsManager(
        semantics_graph=learner.semantics_graph)
    complete_results = []
    print("\nResults for ", experiment_id)
    for word, _ in pseudoword_to_kind.items():
        results = [(kind,
                    semantics_manager.evaluate_kind_membership(word, kind))
                   for kind in pseudoword_to_kind.values()]
        complete_results.append(results)

    results_df = pd.DataFrame(
        [[np.asscalar(i[1]) for i in l] for l in complete_results],
        columns=["Animal", "Food", "People"],
    )
    results_df.insert(0, "Words", pseudoword_to_kind.keys())

    # print(results_df.to_csv(index=False))
    print(tabulate(results_df, headers="keys", tablefmt="psql"))

    learner.log_hypotheses(Path(f"./renders/{experiment_id}"))

    learner.render_semantics_to_file(
        graph=learner.semantics_graph,
        graph_name="semantics",
        output_file=Path(f"./renders/{experiment_id}/semantics.png"),
    )
Example #12
def test_subset_learner_subobject():
    mom = SituationObject.instantiate_ontology_node(
        ontology_node=MOM, ontology=GAILA_PHASE_1_ONTOLOGY)
    head = SituationObject.instantiate_ontology_node(
        ontology_node=HEAD, ontology=GAILA_PHASE_1_ONTOLOGY)
    hand = SituationObject.instantiate_ontology_node(
        ontology_node=HAND, ontology=GAILA_PHASE_1_ONTOLOGY)
    ball = SituationObject.instantiate_ontology_node(
        ontology_node=BALL, ontology=GAILA_PHASE_1_ONTOLOGY)
    house = SituationObject.instantiate_ontology_node(
        ontology_node=HOUSE, ontology=GAILA_PHASE_1_ONTOLOGY)
    ground = SituationObject.instantiate_ontology_node(
        ontology_node=GROUND, ontology=GAILA_PHASE_1_ONTOLOGY)

    mom_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY, salient_objects=immutableset([mom]))

    floating_head_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=immutableset([head]),
        other_objects=immutableset([ground]),
        always_relations=flatten_relations(negate(on(head, ground))),
    )

    # Need to include some extra situations so that the learner will prune its semantics for 'a'
    # away and not recognize it as an object.
    floating_hand_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=immutableset([hand]),
        other_objects=immutableset([ground]),
        always_relations=flatten_relations(negate(on(hand, ground))),
    )

    floating_ball_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=immutableset([ball]),
        other_objects=immutableset([ground]),
        always_relations=flatten_relations(negate(on(ball, ground))),
    )

    floating_house_situation = HighLevelSemanticsSituation(
        ontology=GAILA_PHASE_1_ONTOLOGY,
        salient_objects=immutableset([house]),
        other_objects=immutableset([ground]),
        always_relations=flatten_relations(negate(on(house, ground))),
    )

    object_learner = SubsetObjectLearnerNew(ontology=GAILA_PHASE_1_ONTOLOGY,
                                            beam_size=5,
                                            language_mode=LanguageMode.ENGLISH)

    for situation in [
            mom_situation,
            floating_head_situation,
            floating_hand_situation,
            floating_ball_situation,
            floating_house_situation,
    ]:
        perceptual_representation = GAILA_PHASE_1_PERCEPTION_GENERATOR.generate_perception(
            situation, chooser=RandomChooser.for_seed(0))
        for linguistic_description in GAILA_PHASE_1_LANGUAGE_GENERATOR.generate_language(
                situation, chooser=RandomChooser.for_seed(0)):
            perception_graph = PerceptionGraph.from_frame(
                perceptual_representation.frames[0])

            object_learner.learn_from(
                LanguagePerceptionSemanticAlignment(
                    language_concept_alignment=LanguageConceptAlignment.
                    create_unaligned(language=linguistic_description),
                    perception_semantic_alignment=PerceptionSemanticAlignment(
                        perception_graph=perception_graph, semantic_nodes=[]),
                ))

    mom_perceptual_representation = GAILA_PHASE_1_PERCEPTION_GENERATOR.generate_perception(
        mom_situation, chooser=RandomChooser.for_seed(0))
    perception_graph = PerceptionGraph.from_frame(
        mom_perceptual_representation.frames[0])
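    # Ask the trained learner to enrich a fresh Mom perception with the object concepts it has acquired.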
    enriched = object_learner.enrich_during_description(
        PerceptionSemanticAlignment.create_unaligned(perception_graph))

    semantic_node_types_and_debug_strings = {
        (type(semantic_node), semantic_node.concept.debug_string)
        for semantic_node in enriched.semantic_nodes
    }
    assert (ObjectSemanticNode, "Mom") in semantic_node_types_and_debug_strings
    assert (ObjectSemanticNode,
            "head") in semantic_node_types_and_debug_strings
    assert (ObjectSemanticNode,
            "hand") in semantic_node_types_and_debug_strings
Example #13
def make_spin_tall_short_curriculum(  # pylint: disable=unused-argument
    num_samples: Optional[int],
    noise_objects: Optional[int],
    language_generator: LanguageGenerator[HighLevelSemanticsSituation,
                                          LinearizedDependencyTree],
) -> Phase1InstanceGroup:
    # "Mom spins a tall chair"
    # We generate situations directly since templates fail to generate plurals.

    learner = SituationObject.instantiate_ontology_node(
        ontology_node=LEARNER,
        debug_handle=LEARNER.handle,
        ontology=GAILA_PHASE_1_ONTOLOGY,
    )
    situations = []
    for agent_ontology_node in [MOM, DAD, BABY, DOG]:
        agent = SituationObject.instantiate_ontology_node(
            ontology_node=agent_ontology_node,
            debug_handle=agent_ontology_node.handle,
            ontology=GAILA_PHASE_1_ONTOLOGY,
        )
        for _object in [CHAIR, TABLE]:
            theme = SituationObject.instantiate_ontology_node(
                ontology_node=_object,
                debug_handle=_object.handle,
                ontology=GAILA_PHASE_1_ONTOLOGY,
            )
            other_objs = [
                SituationObject.instantiate_ontology_node(
                    ontology_node=_object,
                    debug_handle=_object.handle + f"_{i}",
                    ontology=GAILA_PHASE_1_ONTOLOGY,
                ) for i in range(3)
            ]
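            # Background: the learner plus three extra copies of the theme, used for size comparisons.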
            computed_background = [learner]
            computed_background.extend(other_objs)

            # Tall and short
            for relation_list in [
                [bigger_than(learner, theme),
                 bigger_than(other_objs, theme)],
                [bigger_than(theme, learner),
                 bigger_than(theme, other_objs)],
            ]:
                situations.append(
                    HighLevelSemanticsSituation(
                        ontology=GAILA_PHASE_1_ONTOLOGY,
                        salient_objects=[agent, theme],
                        other_objects=computed_background,
                        actions=[
                            Action(
                                SPIN,
                                argument_roles_to_fillers=[
                                    (AGENT, agent),
                                    (THEME, theme),
                                ],
                            )
                        ],
                        always_relations=relation_list,
                        syntax_hints=[USE_VERTICAL_MODIFIERS],
                    ))

    return phase1_instances("Tall - Short Curriculum",
                            situations,
                            language_generator=language_generator)
Example #14
def make_eat_big_small_curriculum(  # pylint: disable=unused-argument
    num_samples: Optional[int],
    noise_objects: Optional[int],
    language_generator: LanguageGenerator[HighLevelSemanticsSituation,
                                          LinearizedDependencyTree],
) -> Phase1InstanceGroup:
    # "Mom eats a big cookie"
    # We generate situations directly since templates fail to generate plurals.

    learner = SituationObject.instantiate_ontology_node(
        ontology_node=LEARNER,
        debug_handle=LEARNER.handle,
        ontology=GAILA_PHASE_1_ONTOLOGY,
    )
    situations = []

    for eater_ontology_node in [MOM, DAD, BABY, DOG]:
        eater = SituationObject.instantiate_ontology_node(
            ontology_node=eater_ontology_node,
            debug_handle=eater_ontology_node.handle,
            ontology=GAILA_PHASE_1_ONTOLOGY,
        )
        for _object in [COOKIE, WATERMELON]:
            object_to_eat = SituationObject.instantiate_ontology_node(
                ontology_node=_object,
                debug_handle=_object.handle,
                ontology=GAILA_PHASE_1_ONTOLOGY,
            )
            other_edibles = [
                SituationObject.instantiate_ontology_node(
                    ontology_node=_object,
                    debug_handle=_object.handle + f"_{i}",
                    ontology=GAILA_PHASE_1_ONTOLOGY,
                ) for i in range(3)
            ]
            computed_background = [learner]
            computed_background.extend(other_edibles)

            # Big
            for relation_list in [
                [
                    bigger_than(object_to_eat, learner),
                    bigger_than(object_to_eat, other_edibles),
                ],
                [
                    bigger_than(learner, object_to_eat),
                    bigger_than(other_edibles, object_to_eat),
                ],
            ]:
                situations.append(
                    HighLevelSemanticsSituation(
                        ontology=GAILA_PHASE_1_ONTOLOGY,
                        salient_objects=[eater, object_to_eat],
                        other_objects=computed_background,
                        actions=[
                            Action(
                                EAT,
                                argument_roles_to_fillers=[
                                    (AGENT, eater),
                                    (PATIENT, object_to_eat),
                                ],
                            )
                        ],
                        always_relations=relation_list,
                    ))

    return phase1_instances("Big - Small Curriculum",
                            situations,
                            language_generator=language_generator)