Example #1
0
def main():
    """Smoke-test ProAdverbManager round-tripping (say -> parse)."""
    m = ProAdverbManager()

    # "here" is the proximal singular place pro-adverb; saying it swallows
    # the preposition ("at <this place>" -> "here"), hence eat_prep.
    proximal_sing = Selector(
        Correlative.PROX, N5.SING, N5.SING, N5.SING, N5.SING)
    r = m.say(PLACE_PREP, proximal_sing, 'place', False)
    assert r.tokens == ['here']
    assert r.conjugation == Conjugation.S3
    assert r.eat_prep

    # Parsing "here" should recover the hallucinated preposition, the
    # selector, and the underlying noun.
    ss = ('here',)
    aaa = m.parse(ss)
    assert len(aaa) == 1
    prep, selector, noun = aaa[0]
    assert prep == PLACE_PREP
    assert selector.dump() == proximal_sing.dump()
    assert noun == 'place'

    # "someone" is an existential person pro-adverb; no preposition is
    # involved either way.
    existential_sing = Selector(
        Correlative.EXIST, N5.SING, N5.SING, N5.DUAL, N5.MANY)
    r = m.say(None, existential_sing, 'person', False)
    assert r.tokens == ['someone']
    assert r.conjugation == Conjugation.S3
    assert not r.eat_prep

    ss = ('someone',)
    aaa = m.parse(ss)
    assert len(aaa) == 1
    prep, selector, noun = aaa[0]
    assert prep is None
    assert selector.dump() == existential_sing.dump()
    assert noun == 'person'
Example #2
0
def combine_entries(aaa, cor2res_gno):
    """
    list of (Correlative, is pro, is plur, out of) -> (Selectors, Selectors)

    Groups the raw entries by (Correlative, is_pro), then emits one Selector
    per contiguous "how many" range, split into determiners and pronouns.
    """
    # They are all the same Correlative except for "some" (INDEF + EXIST).
    cor_pro2ns_ofs = defaultdict(list)
    for correlative, is_pro, is_plur, of in aaa:
        if is_plur:
            nn = [N5.ZERO, N5.DUAL, N5.FEW, N5.MANY]
        else:
            nn = [N5.SING]
        # Only counts that fit inside the out-of size make sense.
        for n in nn:
            if n <= of:
                cor_pro2ns_ofs[(correlative, is_pro)].append((n, of))

    # For each grouping,
    dets = []
    pros = []
    for correlative, is_pro in sorted(cor_pro2ns_ofs):
        ns_ofs = cor_pro2ns_ofs[(correlative, is_pro)]

        # Collect the variety of how many/out of there are for this word.
        ns, ofs = map(set, zip(*ns_ofs))

        # Require that each out-of range is contiguous.  This is also because it
        # happens to be true and it allows Selectors to contain ranges instead
        # of the insanity of individual N5s.
        # (list(range(...)) so the comparison also holds under Python 3,
        # where range() is a lazy object and never equals a list.)
        ofs = sorted(ofs)
        assert ofs == list(range(ofs[0], ofs[-1] + 1))

        for n_min, n_max in split_into_ranges(sorted(ns)):
            # Get the possible range of how many they were selected from.
            of_n_min = min(ofs)
            of_n_max = max(ofs)

            # Create a Selector that covers all of those tuples.
            r = Selector(correlative, n_min, n_max, of_n_min, of_n_max)
            count_restriction, _ = cor2res_gno[correlative]
            r = r.fitted_to_count_restriction(count_restriction)
            if not r:
                continue

            if is_pro:
                pros.append(r)
            else:
                dets.append(r)

    return dets, pros
Example #3
0
def test_say(m):
    """Exercise say() over a table of (Selector args, is_pro, expected)."""
    cases = [
        ((Correlative.DEF, N5.FEW, N5.FEW, N5.FEW, N5.FEW), False,
         SayResult(tokens=['the'], conjugation=Conjugation.P3,
                   eat_prep=False)),
        ((Correlative.DEF, N5.SING, N5.SING, N5.SING, N5.SING), False,
         SayResult(tokens=['the'], conjugation=Conjugation.S3,
                   eat_prep=False)),
        # "the" cannot stand alone as a pronoun.
        ((Correlative.DEF, N5.SING, N5.SING, N5.SING, N5.SING), True,
         None),
        ((Correlative.NEG, N5.ZERO, N5.ZERO, N5.DUAL, N5.DUAL), False,
         SayResult(tokens=['neither'], conjugation=Conjugation.P3,
                   eat_prep=False)),
        ((Correlative.NEG, N5.ZERO, N5.ZERO, N5.DUAL, N5.DUAL), True,
         SayResult(tokens=['neither'], conjugation=Conjugation.P3,
                   eat_prep=False)),
        ((Correlative.NEG, N5.ZERO, N5.ZERO, N5.FEW, N5.MANY), False,
         SayResult(tokens=['no'], conjugation=Conjugation.P3,
                   eat_prep=False)),
        ((Correlative.NEG, N5.ZERO, N5.ZERO, N5.FEW, N5.MANY), True,
         SayResult(tokens=['none'], conjugation=Conjugation.P3,
                   eat_prep=False)),
    ]
    for selector_args, is_pro, expected in cases:
        say(m, Selector(*selector_args), is_pro, expected)
Example #4
0
def main():
    """Smoke-test ProAdverbManager round-tripping (say -> parse)."""
    m = ProAdverbManager()

    # "here" is the proximal singular place pro-adverb; saying it swallows
    # the preposition, hence eat_prep.
    proximal_sing = Selector(Correlative.PROX, N5.SING, N5.SING, N5.SING,
                             N5.SING)
    r = m.say(PLACE_PREP, proximal_sing, 'place', False)
    assert r.tokens == ['here']
    assert r.conjugation == Conjugation.S3
    assert r.eat_prep

    # Parsing "here" recovers the hallucinated preposition, selector, noun.
    ss = ('here',)
    aaa = m.parse(ss)
    assert len(aaa) == 1
    prep, selector, noun = aaa[0]
    assert prep == PLACE_PREP
    assert selector.dump() == proximal_sing.dump()
    assert noun == 'place'

    # "someone" is an existential person pro-adverb; no preposition.
    existential_sing = Selector(Correlative.EXIST, N5.SING, N5.SING, N5.DUAL,
                                N5.MANY)
    r = m.say(None, existential_sing, 'person', False)
    assert r.tokens == ['someone']
    assert r.conjugation == Conjugation.S3
    assert not r.eat_prep

    ss = ('someone',)
    aaa = m.parse(ss)
    assert len(aaa) == 1
    prep, selector, noun = aaa[0]
    assert prep is None
    assert selector.dump() == existential_sing.dump()
    assert noun == 'person'
Example #5
0
def test_parse(m):
    """Exercise parse() over a table of (text, expected Selectors)."""
    cases = [
        # Bare "the" with no noun slot does not parse.
        ('the', []),
        ('the _',
         [Selector(Correlative.DEF, N5.SING, N5.MANY, N5.SING, N5.MANY)]),
        ('this',
         [Selector(Correlative.PROX, N5.SING, N5.SING, N5.SING, N5.SING)]),
        ('both',
         [Selector(Correlative.UNIV_ALL, N5.DUAL, N5.DUAL, N5.DUAL,
                   N5.DUAL)]),
        ('neither',
         [Selector(Correlative.NEG, N5.ZERO, N5.ZERO, N5.DUAL, N5.DUAL)]),
        ('either',
         [Selector(Correlative.ELECT_ANY, N5.SING, N5.SING, N5.DUAL,
                   N5.DUAL)]),
        ('any',
         [Selector(Correlative.ELECT_ANY, N5.SING, N5.MANY, N5.FEW,
                   N5.MANY)]),
    ]
    for text, selectors in cases:
        parse(m, text, selectors)
Example #6
0
    def load(d, loader):
        """Rebuild a SurfaceCommonNoun from its dumped dict form."""
        possessor = loader.load(d["possessor"])
        selector = Selector.load(d["selector"])
        number = loader.load(d["number"])
        attributes = [loader.load(a) for a in d["attributes"]]
        noun = d["noun"]

        # Prep arguments may be absent (falsy); only load the present ones.
        preps_nargs = [(prep, loader.load(arg) if arg else arg)
                       for prep, arg in d["preps_nargs"]]

        return SurfaceCommonNoun(possessor, selector, number, attributes,
                                 noun, preps_nargs)
Example #7
0
    def load(d, loader):
        """Rebuild a DeepCommonNoun from its dumped dict form."""
        possessor = loader.load(d['possessor'])
        selector = Selector.load(d['selector'])
        number = loader.load(d['number'])
        attributes = d['attributes']
        noun = d['noun']

        # Relations are dumped by name; their arguments as sub-objects.
        rels_nargs = [(Relation.from_str[rel], loader.load(arg))
                      for rel, arg in d['rels_nargs']]

        return DeepCommonNoun(possessor, selector, number, attributes, noun,
                              rels_nargs)
Example #8
0
    def load(d, loader):
        """Deserialize a DeepCommonNoun previously dumped to a dict."""
        possessor = loader.load(d['possessor'])
        selector = Selector.load(d['selector'])
        number = loader.load(d['number'])
        attributes = d['attributes']
        noun = d['noun']

        rels_nargs = []
        for rel_name, dumped_arg in d['rels_nargs']:
            # Relations are stored by string name; arguments as sub-dicts.
            rels_nargs.append(
                (Relation.from_str[rel_name], loader.load(dumped_arg)))

        return DeepCommonNoun(possessor, selector, number, attributes, noun,
                              rels_nargs)
Example #9
0
    def load(d, loader):
        """Reconstruct a SurfaceCommonNoun from its dumped dict form."""
        possessor = loader.load(d['possessor'])
        selector = Selector.load(d['selector'])
        number = loader.load(d['number'])
        attributes = [loader.load(a) for a in d['attributes']]
        noun = d['noun']

        # Prep arguments may be absent (falsy); only load the present ones.
        preps_nargs = [(prep, loader.load(arg) if arg else arg)
                       for prep, arg in d['preps_nargs']]

        return SurfaceCommonNoun(possessor, selector, number, attributes,
                                 noun, preps_nargs)
Example #10
0
    def parse(self, ss):
        """
        tokens -> list of (hallucinated preposition, Selector, noun)

        Try to pull a pro-adverb out of the given words (typically just one
        word).
        """
        results = []
        for is_archaic in (False, True):
            cors_pacs = self.ss_archaic2cors_pacs[(ss, is_archaic)]
            for correlative, pro_adverb_col in cors_pacs:
                noun = self.pro_adverb_col2noun[pro_adverb_col]
                # Some nouns imply a preposition to hallucinate
                # (eg. "here" -> at a place).
                prep = self.noun2hallucinate_prep.get(noun)
                count_res, _ = self.cor2res_gno[correlative]
                selector = Selector.from_correlative(correlative, count_res)
                results.append((prep, selector, noun))
        return results
Example #11
0
    def parse(self, ss):
        """
        tokens -> list of (hallucinated preposition, Selector, noun)

        Try to pull a pro-adverb out of the given words (typically just one
        word).
        """
        rr = []
        for is_archaic in (False, True):
            key = (ss, is_archaic)
            for cor, pac in self.ss_archaic2cors_pacs[key]:
                noun = self.pro_adverb_col2noun[pac]
                # Some nouns imply a preposition to hallucinate.
                prep = self.noun2hallucinate_prep.get(noun)
                count_restriction, _ = self.cor2res_gno[cor]
                rr.append((prep,
                           Selector.from_correlative(cor, count_restriction),
                           noun))
        return rr
Example #12
0
    def recog_dt_nn_head(self, root_token, noun, gram_n2):
        """
        * (ADJS) NN(S)     "fat mice"
        * DT (ADJS) NN(S)  "the fat mice"
        """
        # Tuple-parameter lambdas are Python 2-only (removed by PEP 3113);
        # a comprehension unpacks the same way and stays a list on Python 3.
        downs = [(rel, child) for rel, child in root_token.downs
                 if rel not in ('cc', 'conj', 'prep')]
        if downs:
            # Determiner present: its word constrains which Selectors fit.
            dep, child = downs[0]
            if dep != 'det':
                return []

            s = child.text
            if s == 'a' or s == 'an':
                s = A_OR_AN

            maybe_selectors = self.det_pronoun_mgr.parse_determiner(s)

            selectors = []
            for maybe_selector in maybe_selectors:
                for sel in maybe_selector.restricted_to_grammatical_number(
                        gram_n2, self.det_pronoun_mgr.cor2res_gno):
                    selectors.append(sel)
        else:
            # No determiner: only a plural bare noun is grammatical here.
            if gram_n2 == N2.SING:
                return []

            selector = Selector(Correlative.INDEF, N5.DUAL, N5.MANY, N5.DUAL,
                                N5.MANY)
            selectors = [selector]

        # Any remaining children must be adjectives modifying the noun.
        attrs = []
        for dep, child in downs[1:]:
            if dep != 'amod':
                return []
            attrs.append(child.text)

        nn = []
        for selector in selectors:
            n = SurfaceCommonNoun(selector=selector,
                                  attributes=list(attrs),
                                  noun=noun)
            nn.append(n)

        # Comprehension instead of map() so Python 3 also returns a list.
        return [(None, n) for n in nn]
Example #13
0
    def recog_posdet_nn_head(self, root_token, noun, gram_n2):
        """
        * PRP$ (ADJS) NN(S)
        * WP$ (ADJS) NN(S)
        """
        # Tuple-parameter lambdas are Python 2-only (removed by PEP 3113);
        # a comprehension unpacks the same way and stays a list on Python 3.
        downs = [(rel, child) for rel, child in root_token.downs
                 if rel not in ('cc', 'conj', 'prep')]

        if not downs:
            return []

        # The first remaining child must be the possessive determiner.
        rel, possessor = downs[0]
        if rel != 'pos':
            return []

        # The rest must be adjectives modifying the noun.
        attrs = []
        for rel, child in downs[1:]:
            if rel != 'amod':
                return []
            attrs.append(child.text)

        nn = []
        for declension in self.personal_mgr.posdet_parse((possessor.text, )):
            pos = PersonalPronoun(declension, PersonalPronounCase.OBJECT)

            # Possessives select definitely ("my cat" ~ "the cat of mine").
            correlative = Correlative.DEF
            count_restriction = self.det_pronoun_mgr.cor2res_gno[correlative][
                0]
            selector = Selector.from_correlative(correlative,
                                                 count_restriction)
            if not selector:
                continue
            for selector in selector.restricted_to_grammatical_number(
                    gram_n2, self.det_pronoun_mgr.cor2res_gno):
                n = SurfaceCommonNoun(possessor=pos,
                                      selector=selector,
                                      attributes=list(attrs),
                                      noun=noun)
                nn.append(n)
        # Comprehension instead of map() so Python 3 also returns a list.
        return [(None, n) for n in nn]
Example #14
0
    def recog_posdet_nn_head(self, root_token, noun, gram_n2):
        """
        * PRP$ (ADJS) NN(S)
        * WP$ (ADJS) NN(S)
        """
        # Tuple-parameter lambdas are Python 2-only (removed by PEP 3113);
        # a comprehension unpacks the same way and stays a list on Python 3.
        downs = [(rel, child) for rel, child in root_token.downs
                 if rel not in ('cc', 'conj', 'prep')]

        if not downs:
            return []

        # The first remaining child must be the possessive determiner.
        rel, possessor = downs[0]
        if rel != 'pos':
            return []

        # The rest must be adjectives modifying the noun.
        attrs = []
        for rel, child in downs[1:]:
            if rel != 'amod':
                return []
            attrs.append(child.text)

        nn = []
        for declension in self.personal_mgr.posdet_parse((possessor.text,)):
            pos = PersonalPronoun(declension, PersonalPronounCase.OBJECT)

            # Possessives select definitely ("my cat" ~ "the cat of mine").
            correlative = Correlative.DEF
            count_restriction = self.det_pronoun_mgr.cor2res_gno[correlative][0]
            selector = Selector.from_correlative(correlative, count_restriction)
            if not selector:
                continue
            for selector in selector.restricted_to_grammatical_number(
                    gram_n2, self.det_pronoun_mgr.cor2res_gno):
                n = SurfaceCommonNoun(possessor=pos, selector=selector,
                                      attributes=list(attrs), noun=noun)
                nn.append(n)
        # Comprehension instead of map() so Python 3 also returns a list.
        return [(None, n) for n in nn]
Example #15
0
    def recog_how_many_nn_head(self, root_token, noun, n2):
        """
        Recognize a "how many/few NN(S)" head -> [(None, SurfaceCommonNoun)].
        """
        # Find the "few"/"many" adjective under the noun.
        many = None
        for rel, child in root_token.downs:
            if rel == 'amod' and child.text in ('few', 'many'):
                many = child
                break

        if not many:
            return []

        how = None
        for rel, child in many.downs:
            if rel == 'advmod' and child.text == 'how':
                how = child

        # Require the "how" adverb: previously `how` was located but never
        # checked, so a bare "many mice" was wrongly accepted as "how many".
        if not how:
            return []

        number = Number(None)

        correlative = Correlative.INDEF
        count_restriction = self.det_pronoun_mgr.cor2res_gno[correlative][0]
        selector = Selector.from_correlative(correlative, count_restriction)
        assert selector

        n = SurfaceCommonNoun(selector=selector, number=number, noun=noun)
        return [(None, n)]
Example #16
0
    def recog_how_many_nn_head(self, root_token, noun, n2):
        """
        Recognize a "how many/few NN(S)" head -> [(None, SurfaceCommonNoun)].
        """
        # Find the "few"/"many" adjective under the noun.
        many = None
        for rel, child in root_token.downs:
            if rel == 'amod' and child.text in ('few', 'many'):
                many = child
                break

        if not many:
            return []

        how = None
        for rel, child in many.downs:
            if rel == 'advmod' and child.text == 'how':
                how = child

        # Require the "how" adverb: previously `how` was located but never
        # checked, so a bare "many mice" was wrongly accepted as "how many".
        if not how:
            return []

        number = Number(None)

        correlative = Correlative.INDEF
        count_restriction = self.det_pronoun_mgr.cor2res_gno[correlative][0]
        selector = Selector.from_correlative(correlative, count_restriction)
        assert selector

        n = SurfaceCommonNoun(selector=selector, number=number, noun=noun)
        return [(None, n)]