Example #1
def main():
    m = ProAdverbManager()

    proximal_sing = Selector(Correlative.PROX, N5.SING, N5.SING, N5.SING,
                             N5.SING)
    r = m.say(PLACE_PREP, proximal_sing, 'place', False)
    assert r.tokens == ['here']
    assert r.conjugation == Conjugation.S3
    assert r.eat_prep is True

    ss = ('here',)  # a single-token tuple
    aaa = m.parse(ss)
    assert len(aaa) == 1
    prep, selector, noun = aaa[0]
    assert prep == PLACE_PREP
    assert selector.dump() == proximal_sing.dump()
    assert noun == 'place'

    existential_sing = Selector(Correlative.EXIST, N5.SING, N5.SING, N5.DUAL,
                                N5.MANY)
    r = m.say(None, existential_sing, 'person', False)
    assert r.tokens == ['someone']
    assert r.conjugation == Conjugation.S3
    assert r.eat_prep is False

    ss = ('someone',)  # a single-token tuple
    aaa = m.parse(ss)
    assert len(aaa) == 1
    prep, selector, noun = aaa[0]
    assert prep is None
    assert selector.dump() == existential_sing.dump()
    assert noun == 'person'
Example #2
def test_say(m):
    selector = Selector(Correlative.DEF, N5.FEW, N5.FEW, N5.FEW, N5.FEW)
    r = SayResult(tokens=['the'], conjugation=Conjugation.P3, eat_prep=False)
    say(m, selector, False, r)

    selector = Selector(Correlative.DEF, N5.SING, N5.SING, N5.SING, N5.SING)
    r = SayResult(tokens=['the'], conjugation=Conjugation.S3, eat_prep=False)
    say(m, selector, False, r)

    selector = Selector(Correlative.DEF, N5.SING, N5.SING, N5.SING, N5.SING)
    r = None
    say(m, selector, True, r)

    selector = Selector(Correlative.NEG, N5.ZERO, N5.ZERO, N5.DUAL, N5.DUAL)
    r = SayResult(tokens=['neither'],
                  conjugation=Conjugation.P3,
                  eat_prep=False)
    say(m, selector, False, r)

    selector = Selector(Correlative.NEG, N5.ZERO, N5.ZERO, N5.DUAL, N5.DUAL)
    r = SayResult(tokens=['neither'],
                  conjugation=Conjugation.P3,
                  eat_prep=False)
    say(m, selector, True, r)

    selector = Selector(Correlative.NEG, N5.ZERO, N5.ZERO, N5.FEW, N5.MANY)
    r = SayResult(tokens=['no'], conjugation=Conjugation.P3, eat_prep=False)
    say(m, selector, False, r)

    selector = Selector(Correlative.NEG, N5.ZERO, N5.ZERO, N5.FEW, N5.MANY)
    r = SayResult(tokens=['none'], conjugation=Conjugation.P3, eat_prep=False)
    say(m, selector, True, r)
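
The say() helper used above is not shown in this example. Below is a minimal sketch of what it might look like, assuming the manager exposes a say(selector, is_pro) method that returns a SayResult, or None when the combination cannot be rendered; the method name and signature are assumptions, not confirmed by the source.

def say(m, selector, is_pro, expected):
    # Hypothetical test helper: render the selector and compare against the
    # expected SayResult (or expect None when nothing can be said).
    r = m.say(selector, is_pro)  # assumed signature
    if expected is None:
        assert r is None
    else:
        assert r.tokens == expected.tokens
        assert r.conjugation == expected.conjugation
        assert r.eat_prep == expected.eat_prep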
Example #3
def test_parse(m):
    parse(m, 'the', [])

    parse(m, 'the _', [
        Selector(Correlative.DEF, N5.SING, N5.MANY, N5.SING, N5.MANY),
    ])

    parse(m, 'this', [
        Selector(Correlative.PROX, N5.SING, N5.SING, N5.SING, N5.SING),
    ])

    parse(m, 'both', [
        Selector(Correlative.UNIV_ALL, N5.DUAL, N5.DUAL, N5.DUAL, N5.DUAL),
    ])

    parse(m, 'neither', [
        Selector(Correlative.NEG, N5.ZERO, N5.ZERO, N5.DUAL, N5.DUAL),
    ])

    parse(m, 'either', [
        Selector(Correlative.ELECT_ANY, N5.SING, N5.SING, N5.DUAL, N5.DUAL),
    ])

    parse(m, 'any', [
        Selector(Correlative.ELECT_ANY, N5.SING, N5.MANY, N5.FEW, N5.MANY),
    ])
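
Likewise, the parse() helper is not shown. Below is a sketch under the assumption that the manager exposes parse_determiner() (the method used in Example #5) and that candidate Selectors are compared via dump(), as in Example #1; both the entry point and the comparison are assumptions.

def parse(m, text, expected_selectors):
    # Hypothetical test helper: parse the determiner text and check the
    # resulting Selectors against the expected ones.
    got = m.parse_determiner(text)  # assumed entry point
    assert len(got) == len(expected_selectors)
    for got_sel, want_sel in zip(got, expected_selectors):
        assert got_sel.dump() == want_sel.dump()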
Example #4
from collections import defaultdict


def combine_entries(aaa, cor2res_gno):
    """
    list of (Correlative, is pro, is plur, out of) -> (Selectors, Selectors)
    """
    # They are all the same Correlative except for "some" (INDEF + EXIST).
    cor_pro2ns_ofs = defaultdict(list)
    for correlative, is_pro, is_plur, of in aaa:
        if is_plur:
            nn = [N5.ZERO, N5.DUAL, N5.FEW, N5.MANY]
        else:
            nn = [N5.SING]
        nn = [n for n in nn if n <= of]
        for n in nn:
            cor_pro2ns_ofs[(correlative, is_pro)].append((n, of))

    # For each grouping,
    dets = []
    pros = []
    for correlative, is_pro in sorted(cor_pro2ns_ofs):
        ns_ofs = cor_pro2ns_ofs[(correlative, is_pro)]

        # Collect the variety of how many/out of there are for this word.
        ns, ofs = map(set, zip(*ns_ofs))

        # Require that the out-of values form a contiguous range.  This holds
        # in practice, and it lets Selectors store ranges instead of
        # enumerating individual N5s.
        ofs = sorted(ofs)
        assert ofs == list(range(ofs[0], ofs[-1] + 1))

        for n_min, n_max in split_into_ranges(sorted(ns)):
            # Get the possible range of how many they were selected from.
            of_n_min = min(ofs)
            of_n_max = max(ofs)

            # Create a Selector that covers all of those tuples.
            r = Selector(correlative, n_min, n_max, of_n_min, of_n_max)
            count_restriction, _ = cor2res_gno[correlative]
            r = r.fitted_to_count_restriction(count_restriction)
            if not r:
                continue

            if is_pro:
                pros.append(r)
            else:
                dets.append(r)

    return dets, pros
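
combine_entries() also depends on split_into_ranges(), which is not shown here. Below is a minimal sketch, assuming N5 members compare like integers (e.g. an IntEnum) and that the function yields a (lowest, highest) pair for each maximal run of consecutive values in a sorted list.

def split_into_ranges(nn):
    # Hypothetical implementation: group a sorted list of N5 values into
    # (start, end) pairs covering each run of consecutive members.
    if not nn:
        return
    start = prev = nn[0]
    for n in nn[1:]:
        if n != prev + 1:  # assumes N5 members behave like ints
            yield start, prev
            start = n
        prev = n
    yield start, prev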
Example #5
    def recog_dt_nn_head(self, root_token, noun, gram_n2):
        """
        * (ADJS) NN(S)     "fat mice"
        * DT (ADJS) NN(S)  "the fat mice"
        """
        downs = [(rel, child) for rel, child in root_token.downs
                 if rel not in ('cc', 'conj', 'prep')]
        if downs:
            dep, child = downs[0]
            if dep != 'det':
                return []

            s = child.text
            if s == 'a' or s == 'an':
                s = A_OR_AN

            maybe_selectors = self.det_pronoun_mgr.parse_determiner(s)

            selectors = []
            for maybe_selector in maybe_selectors:
                for sel in maybe_selector.restricted_to_grammatical_number(
                        gram_n2, self.det_pronoun_mgr.cor2res_gno):
                    selectors.append(sel)
        else:
            if gram_n2 == N2.SING:
                return []

            selector = Selector(Correlative.INDEF, N5.DUAL, N5.MANY, N5.DUAL,
                                N5.MANY)
            selectors = [selector]

        attrs = []
        for dep, child in downs[1:]:
            if dep != 'amod':
                return []
            attrs.append(child.text)

        nn = []
        for selector in selectors:
            n = SurfaceCommonNoun(selector=selector,
                                  attributes=list(attrs),
                                  noun=noun)
            nn.append(n)

        return [(None, n) for n in nn]