Example #1
 def test_inverse_non_unique_keys(self):
     with raises(
             ValueError,
             match=
             "forbid_duplicate_keys=True, but some keys occur multiple times in input: .*",
     ):
         immutabledict({"foo": "bar", "bat": "bar"}).inverse()
Example #2
    def test_interpolation(self):
        context = Parameters.from_mapping(
            yaml.safe_load(self.WRITING_REFERENCE))
        loader = YAMLParametersLoader()
        self.assertEqual(
            loader._interpolate(
                Parameters.from_mapping(
                    yaml.safe_load(self.MULTIPLE_INTERPOLATION_REFERENCE)),
                context,
            )._data,
            immutabledict([
                ("pear", "raspberry"),
                ("banana", "raspberry"),
                ("apple", "raspberry"),
                ("the_ultimate_fruit", "raspberry"),
            ]),
        )
        self.assertEqual(
            loader._interpolate(
                Parameters.from_mapping(
                    yaml.safe_load(
                        self.MULTIPLE_INTERPOLATION_REFERENCE_NEEDING_CONTEXT)
                ),
                context,
            )._data,
            immutabledict([
                ("pear", "raspberry/world"),
                ("banana", "raspberry/world"),
                ("apple", "raspberry/world"),
                ("the_ultimate_fruit", "raspberry/world"),
                # the actual pair ("hello", "world") should not be present
            ]),
        )
        self.assertEqual(
            loader._interpolate(
                Parameters.from_mapping(
                    yaml.safe_load(self.NESTED_INTERPOLATION)),
                context,
            ).as_nested_dicts(),
            {
                "key": 2,
                "key2": "fooo",
                "key3": {
                    "lalala": "fooo",
                    "meep": 2,
                    "list": [1, 2, 3]
                },
            },
        )

        with self.assertRaisesRegex(
                ParameterInterpolationError,
                r"These interpolated parameters form at least one graph cycle that must be fixed: "
                r"\('b', 'c'\)",
        ):
            loader._interpolate(
                Parameters.from_mapping(
                    yaml.safe_load('a: "%b%"\nb: "%c%"\nc: "%b%"')),
                context,
            )
Example #3
 def test_put_all_mapping(self):
     dict1 = immutabledict({"a": 1})
     dict2 = dict1.modified_copy_builder().put_all({
         "c": "d",
         "e": "f"
     }).build()
     self.assertEqual(immutabledict({"a": 1, "c": "d", "e": "f"}), dict2)
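The examples in this collection build immutabledict instances from several input shapes. A minimal consolidation of the forms that appear here, assuming the top-level immutablecollections import (the examples themselves omit imports):

from immutablecollections import immutabledict

by_mapping = immutabledict({"a": 1, "b": 2})                        # from a plain dict (as in Example #7)
by_pairs = immutabledict([("a", 1), ("b", 2)])                      # from a list of pairs (as in Example #9)
by_generator = immutabledict((k, v) for k, v in by_pairs.items())   # from a generator (as in Example #8)

assert by_mapping == by_pairs == by_generator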
Example #4
def _render_html(corpus: Corpus, output_dir: Path, parent_or_child_id: str,
                 start: int, end: int) -> Tuple[Path, str]:
    """Outputs either the whole document rendered in HTML or a subspan. `end` is inclusive."""

    document = _get_document(corpus, parent_or_child_id)
    if not document:
        raise ValueError(
            f"{document['parent_id']} not present in the document database.")

    justification_spans: ImmutableDict[str, Span] = immutabledict(
        {f"{start}:{end}": Span(start, end + 1)})

    contexts = contexts_from_justifications(justification_spans, document)

    to_render, _ = render_document(document["fulltext"], justification_spans,
                                   contexts)
    if not to_render:
        raise ValueError("Could not find anything to render.")

    final_html = _render_template(
        document=immutabledict({
            "id": document["parent_id"],
            "title": document["title"],
            "html": to_render,
            "span": f"{start}:{end}",
        }))
    output_file = output_dir / f"{document['parent_id']}_{start}-{end}.html"
    output_file.write_text(final_html)

    return output_file, document["fulltext"][start:end + 1]
Example #5
    def get_objects_positions(self) -> PositionsMap:
        """
        Retrieves positions of all AdamObjects contained in this model.
        Returns: PositionsMap

        """
        position_pairs: List[Tuple[str, torch.Tensor]] = []
        scale_pairs: List[Tuple[str, torch.Tensor]] = []
        for (
                object_perception,
                bounding_box,
        ) in self.object_perception_to_bounding_box.items():
            position_pairs.append(
                (object_perception.debug_handle, bounding_box.center.data))
            scale_pairs.append(
                (object_perception.debug_handle, bounding_box.scale.data))
        for (
                object_perception,
                bounding_box,
        ) in self.object_perception_to_excluded_bounding_box.items():
            position_pairs.append(
                (object_perception.debug_handle, bounding_box.center.data))
            scale_pairs.append(
                (object_perception.debug_handle, bounding_box.scale.data))

        return PositionsMap(immutabledict(position_pairs),
                            immutabledict(scale_pairs))
Example #6
 def describe(
     self, perception: PerceptualRepresentation[PerceptionT]
 ) -> Mapping[LinguisticDescription, float]:
     memorized_description = self._memorized_situations.get(perception)
     if memorized_description:
         return immutabledict(((memorized_description, 1.0), ))
     else:
         return immutabledict()
Example #7
 def test_immutable(self):
     source = {"a": 1}
     dict1 = immutabledict(source)
     with self.assertRaises(AttributeError):
         # noinspection PyUnresolvedReferences
         dict1.update({"b": 2})
     # Update doesn't affect original
     source.update({"b": 2})
     self.assertNotEqual(immutabledict(source), dict1)
Example #8
def _sort_mapping_by_token_spans(
        pairs) -> ImmutableDict[ObjectSemanticNode, Span]:
    # we type: ignore because the proper typing of pairs is huge and mypy is going to screw it up
    # anyway.
    unsorted = immutabledict(pairs)  # type: ignore
    return immutabledict(
        (matched_node, token_span) for (matched_node, token_span) in sorted(
            unsorted.items(),
            key=lambda item: Span.earliest_then_longest_first_key(item[1]),
        ))
Example #9
 def test_pickling(self):
     self.assertEqual(
         pickle.loads(
             pickle.dumps(immutabledict([(5, "apple"), (2, "banana")]))),
         immutabledict([(5, "apple"), (2, "banana")]),
     )
     self.assertEqual(
         immutabledict([(5, "apple"), (2, "banana")]).__reduce__(),
         (immutabledict, (((5, "apple"), (2, "banana")), )),
     )
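The second assertion above is what makes the first one work: __reduce__ hands pickle the immutabledict callable plus the key-value pairs, so unpickling simply calls immutabledict(pairs) again. A small sketch of that contract (import path assumed):

import pickle

from immutablecollections import immutabledict

original = immutabledict([(5, "apple"), (2, "banana")])
constructor, args = original.__reduce__()
# unpickling reconstructs the mapping by calling immutabledict with the saved pairs
assert constructor(*args) == original
assert pickle.loads(pickle.dumps(original)) == original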
Example #10
 def test_inverse(self):
     self.assertEqual(
         immutabledict({
             "foo": "bar",
             "bar": "bat"
         }).inverse(),
         immutabledict({
             "bar": "foo",
             "bat": "bar"
         }),
     )
Example #11
def _sort_mapping_by_pattern_complexity(
        pairs) -> ImmutableDict[str, PerceptionGraphPattern]:
    # we type: ignore because the proper typing of pairs is huge and mypy is going to screw it up
    # anyway.
    unsorted = immutabledict(pairs)  # type: ignore
    return immutabledict((string, pattern) for (string, pattern) in sorted(
        unsorted.items(),
        key=lambda item: (
            len(item[1]._graph.nodes),  # pylint:disable=protected-access
            len(item[1]._graph.edges),  # pylint:disable=protected-access
        ),
        reverse=True,
    ))
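Examples #8 and #11 share a pattern: materialize the incoming pairs, sort the items with a key function, and rebuild an immutabledict, relying on immutabledict preserving insertion order (which these helpers evidently depend on). A stripped-down sketch of that pattern with a hypothetical key function (import path assumed):

from typing import Iterable, Tuple

from immutablecollections import ImmutableDict, immutabledict

def sort_by_value_descending(pairs: Iterable[Tuple[str, int]]) -> ImmutableDict[str, int]:
    # materialize first so the input may be any iterable of pairs or a mapping
    unsorted = immutabledict(pairs)
    # rebuild in descending value order; callers rely on insertion order being kept
    return immutabledict(sorted(unsorted.items(), key=lambda item: item[1], reverse=True))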
Example #12
    def _post_process_descriptions(
        self,
        match_results: Sequence[Tuple[TokenSequenceLinguisticDescription,
                                      PerceptionGraphTemplate, float]],
    ) -> Mapping[TokenSequenceLinguisticDescription, float]:
        if not match_results:
            return immutabledict()

        largest_pattern_num_nodes = max(
            len(template.graph_pattern) for (_, template, _) in match_results)

        return immutabledict(
            (description,
             len(template.graph_pattern) / largest_pattern_num_nodes)
            for (description, template, score) in match_results)
Example #13
 def _post_process_descriptions(
     self,
     match_results: Sequence[Tuple[TokenSequenceLinguisticDescription,
                                   PerceptionGraphTemplate, float]],
 ) -> Mapping[TokenSequenceLinguisticDescription, float]:
     return immutabledict(
         (description, score) for (description, _, score) in match_results)
Example #14
    def observe(
        self,
        learning_example: LearningExample[
            DevelopmentalPrimitivePerceptionFrame, LinguisticDescription],
        offset: int = 0,
    ) -> None:

        logging.info(
            "Observation %s: %s",
            self._observation_num + offset,
            learning_example.linguistic_description.as_token_string(),
        )
        self._observation_num += 1

        self._assert_valid_input(learning_example)

        # Pre-processing steps will be different depending on
        # what sort of structures we are running.
        preprocessed_input = self._preprocess_scene_for_learning(
            LanguageAlignedPerception(
                language=learning_example.linguistic_description,
                perception_graph=self._extract_perception_graph(
                    learning_example.perception),
                node_to_language_span=immutabledict(),
            ))

        logging.info(f"Learner observing {preprocessed_input}")

        surface_template = self._extract_surface_template(
            preprocessed_input, self._language_mode)
        self._learning_step(preprocessed_input, surface_template)
Example #15
    def _enrich_common(
        self, perception_semantic_alignment: PerceptionSemanticAlignment
    ) -> PerceptionSemanticAlignment:
        # The functional learner is a 'semantics' learner rather than a perception learner.
        # That is, it 'learns' information from the perceptual learner's output.
        # To process that output easily, we convert the semantic nodes into learner semantics here.
        semantics = LearnerSemantics.from_nodes(
            perception_semantic_alignment.semantic_nodes)
        list_of_matches: List[Tuple[FunctionalObjectConcept,
                                    ObjectConcept]] = []
        # The functional learner only deals with action semantics, so
        # we go over each action semantic node in the scene and, if a slot filler
        # is of FunctionalObjectConcept type, add a mapping from that
        # FunctionalObjectConcept to our best guess at a known concept.
        for action_semantic_node in semantics.actions:
            if action_semantic_node.concept in self._concept_to_slots_to_function_counter:
                for slot, slot_filler in action_semantic_node.slot_fillings.items(
                ):
                    if (slot in self._concept_to_slots_to_function_counter[
                            action_semantic_node.concept]):
                        if isinstance(slot_filler.concept,
                                      FunctionalObjectConcept):
                            list_of_matches.append((
                                slot_filler.concept,
                                self._concept_to_slots_to_function_counter[
                                    action_semantic_node.concept]
                                [slot].get_best_concept(),
                            ))

        return perception_semantic_alignment.copy_with_mapping(
            mapping=immutabledict(list_of_matches))
Example #16
    def initialize(self,
                   *,
                   initial_partial_match: Mapping[Any, Any] = immutabledict()):
        """Reinitializes the state of the algorithm.

        This method should be redefined if using something other than DiGMState.
        If only subclassing GraphMatcher, a redefinition is not necessary.
        """

        # the alignment of nodes between pattern and graph for the match so far
        self.graph_node_to_pattern_node: Dict[Any, Any] = {}
        self.pattern_node_to_graph_node: Dict[Any, Any] = {}

        # See the paper for definitions of M_x and T_x^{y}

        # the maps below track which nodes are on the "frontier" of the matched region
        # of the pattern and graph matches, etc. - that is, which nodes precede or succeed them.
        # The match nodes themselves are included in both sets for algorithmic convenience.
        # For efficiency during search, these are dicts mapping each node
        # to the depth of the search tree when the node was first encountered
        # as a neighbor to the match.
        self.graph_nodes_in_or_preceding_match: Dict[Any, int] = {}
        self.pattern_nodes_in_or_preceding_match: Dict[Any, int] = {}
        self.graph_nodes_in_or_succeeding_match: Dict[Any, int] = {}
        self.pattern_nodes_in_or_succeeding_match: Dict[Any, int] = {}

        self.state = GraphMatchingState(self)

        # Provide a convenient way to access the isomorphism mapping.
        self.mapping = self.graph_node_to_pattern_node.copy()
        self._jump_to_partial_match(initial_partial_match)
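Examples #16, #21, #24, and #30 all use an empty immutabledict() as a default for keyword arguments and attrs fields. Because it cannot be mutated, it sidesteps Python's shared-mutable-default pitfall; a small illustrative contrast (the function names here are made up):

from typing import Any, Mapping

from immutablecollections import immutabledict

def risky(overrides: dict = {}):
    # the same dict object is shared by every call, so this mutation leaks into later calls
    overrides["seen"] = True
    return overrides

def safe(overrides: Mapping[Any, Any] = immutabledict()):
    # the shared default cannot be mutated in place; callers pass their own mapping to override
    return {**overrides, "seen": True}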
Example #17
def contexts_from_justifications(justifications: ImmutableDict[str, Span],
                                 document) -> ImmutableDict[str, Span]:
    document_text = document["fulltext"]
    sentence_spans = get_sentence_spans(document_text)
    contexts: Dict[str, Span] = {}

    for justification_id, justification_span in justifications.items():
        for s_span in sentence_spans:
            if s_span.contains_span(justification_span):
                # the sentence tokenizer doesn't recognize double newline as a potential sentence boundary,
                # so we split on double newlines and return the parts of the pre/post context
                # that are closest to the mention
                precontext_lines = document_text[
                    s_span.start:justification_span.start].split("\n\n")
                precontext_extra = ("\n\n".join(precontext_lines[:-1])
                                    if len(precontext_lines) > 1 else "")

                postcontext_lines = document_text[justification_span.
                                                  end:s_span.end].split("\n\n")
                postcontext_extra = ("\n\n".join(postcontext_lines[1:])
                                     if len(postcontext_lines) > 1 else "")

                modified_sentence_start = s_span.start + len(precontext_extra)
                modified_sentence_end = s_span.end - len(postcontext_extra)

                assert (
                    justification_id not in contexts
                ), "justification should not be overlapping with more than one sentence"
                contexts[justification_id] = Span(modified_sentence_start,
                                                  modified_sentence_end)

    return immutabledict(contexts)
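The double-newline handling in Example #17 keeps only the part of the pre-context after the last blank line and the part of the post-context before the first blank line. A tiny standalone illustration of that splitting arithmetic on plain strings (no Span or tokenizer types involved):

precontext = "Stale paragraph.\n\nRelevant lead-up to "
precontext_lines = precontext.split("\n\n")
precontext_extra = "\n\n".join(precontext_lines[:-1]) if len(precontext_lines) > 1 else ""
# the modified sentence start skips the paragraphs before the last double newline
kept_precontext = precontext[len(precontext_extra):]

postcontext = " trailing words.\n\nUnrelated paragraph."
postcontext_lines = postcontext.split("\n\n")
postcontext_extra = "\n\n".join(postcontext_lines[1:]) if len(postcontext_lines) > 1 else ""
# the modified sentence end drops the paragraphs after the first double newline
kept_postcontext = postcontext[:len(postcontext) - len(postcontext_extra)]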
Example #18
 def _init_pattern_node_to_template_variable(
     self
 ) -> ImmutableDict[ObjectSemanticNodePerceptionPredicate,
                    SyntaxSemanticsVariable]:
     return immutabledict(
         {v: k
          for k, v in self.template_variable_to_pattern_node.items()})
Example #19
    def _sample_combinations(
        self,
        variables: AbstractSet[_VarT],
        *,
        ontology: Ontology,
        chooser: SequenceChooser,
    ) -> Iterable[Mapping[_VarT, OntologyNode]]:
        var_to_options = {
            # beware - the values in this map are infinite generators!
            var: _samples(
                var.node_selector.select_nodes(ontology,
                                               require_non_empty_result=True),
                chooser,
            )
            for var in variables
        }

        if var_to_options:
            for combination in zip(*var_to_options.values()):
                # This makes a dictionary where the keys are the variables and the values
                # correspond to one of the possible assignments.
                yield immutabledict(zip(var_to_options.keys(), combination))
        else:
            while True:
                # if there are no variables to assign, the only possible assignment
                # is the empty assignment
                yield dict()
Example #20
File: corpus.py Project: TonyBY/aida-viz
def _create_child_to_parent_map(
        parent_child_file_df: pd.DataFrame) -> Mapping[str, str]:
    """Using the `docs/parent_children.tab` file from an AIDA corpus, creating a mapping from
    child to parent documents.
    """
    return immutabledict([(row["child_uid"], row["parent_uid"])
                          for _, row in parent_child_file_df.iterrows()])
Example #21
File: semantics.py Project: gabbard/adam
 def from_nodes(
     semantic_nodes: Iterable[SemanticNode],
     *,
     concept_map: ImmutableDict[FunctionalObjectConcept,
                                ObjectConcept] = immutabledict(),
 ) -> "LearnerSemantics":
     return LearnerSemantics(
         objects=[
             node for node in semantic_nodes
             if isinstance(node, ObjectSemanticNode)
         ],
         attributes=[
             node for node in semantic_nodes
             if isinstance(node, AttributeSemanticNode)
         ],
         relations=[
             node for node in semantic_nodes
             if isinstance(node, RelationSemanticNode)
         ],
         actions=[
             node for node in semantic_nodes
             if isinstance(node, ActionSemanticNode)
         ],
         functional_concept_to_object_concept=concept_map,
     )
Example #22
 def _preprocess_scene_for_description(
     self, perception_graph: PerceptionGraph
 ) -> PerceptionGraphFromObjectRecognizer:
     return PerceptionGraphFromObjectRecognizer(
         self._common_preprocessing(perception_graph),
         description_to_matched_object_node=immutabledict(),
     )
Example #23
    def _instantiate_action(
            self, action_node: ActionSemanticNode,
            learner_semantics: LearnerSemantics) -> Iterator[Tuple[str, ...]]:
        if not self.action_learner:
            raise RuntimeError(
                "Cannot instantiate an action without an action learner")

        for action_template in self.action_learner.templates_for_concept(
                action_node.concept):
            # TODO: Handle the case where instantiating an object yields no result from the
            # functional learner. If that happens we should stop instantiating this utterance.
            slots_to_instantiations = {
                slot:
                list(self._instantiate_object(slot_filler, learner_semantics))
                for (slot, slot_filler) in action_node.slot_fillings.items()
            }
            slot_order = tuple(slots_to_instantiations.keys())

            all_possible_slot_fillings = itertools.product(
                *slots_to_instantiations.values())
            for possible_slot_filling in all_possible_slot_fillings:
                yield action_template.instantiate(
                    immutabledict(
                        zip(slot_order,
                            possible_slot_filling))).as_token_sequence()
Example #24
class _AbstractPathMappingKeyValueSource(Generic[V], KeyValueSource[str, V]):
    id_to_path: ImmutableDict[str, Path] = attrib(
        converter=immutabledict, default=immutabledict()
    )

    def keys(self) -> AbstractSet[str]:
        return self.id_to_path.keys()
Example #25
 def enrich_during_learning(
     self, language_perception_semantic_alignment: LanguagePerceptionSemanticAlignment
 ) -> LanguagePerceptionSemanticAlignment:
     (
         perception_post_enrichment,
         newly_recognized_semantic_nodes,
     ) = self._enrich_common(
         language_perception_semantic_alignment.perception_semantic_alignment
     )
     return LanguagePerceptionSemanticAlignment(
         # We need to link the things we found to the language
         # so later learning stages can (a) know they are already covered
         # and (b) use this information in forming the surface templates.
         language_concept_alignment=language_perception_semantic_alignment.language_concept_alignment.copy_with_new_nodes(
             immutabledict(
                 # TODO: we currently handle only one template per concept
                 (
                     semantic_node,
                     one(self.templates_for_concept(semantic_node.concept)),
                 )
                 for semantic_node in newly_recognized_semantic_nodes
                 # We make an exception for a specific type of ObjectConcept which
                 # indicates that we know this root is an object but we don't know
                 # how to refer to it linguistically
                 if not isinstance(semantic_node.concept, FunctionalObjectConcept)
             ),
             filter_out_duplicate_alignments=True,
             # It's okay if we recognize objects we know how to describe,
             # but they just happen not to be mentioned in the linguistic description.
             fail_if_surface_templates_do_not_match_language=False,
         ),
         perception_semantic_alignment=perception_post_enrichment,
     )
Example #26
def pattern_match_to_semantic_node(
    *,
    concept: Concept,
    pattern: PerceptionGraphTemplate,
    match: PerceptionGraphPatternMatch,
) -> SemanticNode:
    template_variable_to_filler: Mapping[
        SyntaxSemanticsVariable, ObjectSemanticNode
    ] = immutabledict(
        (
            pattern.pattern_node_to_template_variable[pattern_node],
            # We know, but the type system does not,
            # that if an ObjectSemanticNodePerceptionPredicate matched,
            # the graph node must be a MatchedObjectNode
            cast(ObjectSemanticNode, matched_graph_node),
        )
        for (
            pattern_node,
            matched_graph_node,
        ) in match.pattern_node_to_matched_graph_node.items()
        if isinstance(pattern_node, ObjectSemanticNodePerceptionPredicate)
        # There can sometimes be relevant matched object nodes which are not themselves
        # slots, like the addressed possessor for "your X".
        and pattern_node in pattern.pattern_node_to_template_variable
    )

    return SemanticNode.for_concepts_and_arguments(
        concept, slots_to_fillers=template_variable_to_filler
    )
Example #27
 def _init_patterns_to_num_subobjects(
         self) -> ImmutableDict[ObjectConcept, int]:
     return immutabledict((
         concept,
         pattern.count_nodes_matching(
             lambda node: isinstance(node, AnyObjectPerception)),
     ) for (concept, pattern) in self._concepts_to_static_patterns.items())
Example #28
    def for_ontology_types(
        ontology_types: Iterable[OntologyNode],
        determiners: Iterable[str],
        ontology: Ontology,
        language_mode: LanguageMode,
        *,
        perception_generator:
        HighLevelSemanticsSituationToDevelopmentalPrimitivePerceptionGenerator,
    ) -> "ObjectRecognizer":
        ontology_types_to_concepts = {
            obj_type: ObjectConcept(obj_type.handle)
            for obj_type in ontology_types
        }

        return ObjectRecognizer(
            concepts_to_static_patterns=_sort_mapping_by_pattern_complexity(
                immutabledict((
                    concept,
                    PerceptionGraphPattern.from_ontology_node(
                        obj_type,
                        ontology,
                        perception_generator=perception_generator),
                ) for (obj_type,
                       concept) in ontology_types_to_concepts.items())),
            determiners=determiners,
            concepts_to_names={
                concept: obj_type.handle
                for obj_type, concept in ontology_types_to_concepts.items()
            },
            language_mode=language_mode,
        )
Example #29
def _to_immutabledict(
    val: Optional[Union[Iterable[Tuple[Any, Any]], Mapping[Any, Any],
                        ImmutableDict[Any, Any]]]
) -> ImmutableDict[Any, Any]:
    """Needed until https://github.com/python/mypy/issues/5738
        and https://github.com/python-attrs/attrs/issues/519 are fixed.
    """
    return immutabledict(val)
Example #30
class TemplateVariableAssignment:
    """
    An assignment of ontology types to object and property variables in a situation.
    """

    object_variables_to_fillers: ImmutableDict["TemplateObjectVariable",
                                               OntologyNode] = attrib(
                                                   converter=_to_immutabledict,
                                                   default=immutabledict())
    property_variables_to_fillers: ImmutableDict[
        "TemplatePropertyVariable",
        OntologyNode] = attrib(converter=_to_immutabledict,
                               default=immutabledict())
    action_variables_to_fillers: ImmutableDict["TemplateActionTypeVariable",
                                               OntologyNode] = attrib(
                                                   converter=_to_immutabledict,
                                                   default=immutabledict())
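Examples #24, #29, and #30 combine an attrs field with an immutabledict converter and an empty immutabledict() default. A minimal self-contained sketch of that combination (the class, field, and None guard are illustrative, not taken from these projects):

from typing import Any, Mapping, Optional

from attr import attrib, attrs
from immutablecollections import ImmutableDict, immutabledict

def _to_immutabledict(val: Optional[Mapping[Any, Any]]) -> ImmutableDict[Any, Any]:
    # mirrors Example #29, with an explicit guard so a None default is tolerated
    return immutabledict(val) if val is not None else immutabledict()

@attrs(frozen=True)
class RendererConfig:
    options: ImmutableDict[str, str] = attrib(
        converter=_to_immutabledict, default=immutabledict()
    )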