コード例 #1
0
def test_get_conll_tags_no_token_list():
    """A tokenization with no token list produces no CoNLL tag groups."""
    bare_tokenization = Tokenization()

    # With the default dependency-parse filter: nothing to emit.
    assert _get_conll_tags_for_tokenization(bare_tokenization) == []

    # An explicit filter that selects no parses behaves identically.
    empty_filter = Mock(return_value=[])
    result = _get_conll_tags_for_tokenization(bare_tokenization, empty_filter)
    assert result == []
コード例 #2
0
def test_get_conll_tags_no_token_list():
    """Tokenizations lacking a token list yield an empty tag result."""
    tokenization = Tokenization()

    # Implicit filter path.
    assert [] == _get_conll_tags_for_tokenization(tokenization)

    # Explicit filter path: a filter returning zero parses.
    no_parse_filter = Mock(return_value=[])
    assert [] == _get_conll_tags_for_tokenization(tokenization,
                                                  no_parse_filter)
コード例 #3
0
def test_get_conll_tags_two_tokens():
    """Tag extraction over a two-token tokenization, one case per filter."""
    tokenization = Tokenization(
        tokenList=TokenList(tokenList=[
            Token(tokenIndex=0, text='t0'),
            Token(tokenIndex=1, text='t1'),
        ]),
        dependencyParseList=sentinel.dpl,
    )

    # Filter selects no parses: no tag lists at all.
    filter_none = Mock(return_value=[])
    assert _get_conll_tags_for_tokenization(tokenization, filter_none) == []
    filter_none.assert_called_with(sentinel.dpl)

    # One parse with no dependencies: both tokens get blank (head, edge) pairs.
    filter_empty_parse = Mock(return_value=[
        DependencyParse(dependencyList=[]),
    ])
    expected = [[(u'', u''), (u'', u'')]]
    assert _get_conll_tags_for_tokenization(
        tokenization, filter_empty_parse) == expected
    filter_empty_parse.assert_called_with(sentinel.dpl)

    # One parse covering only the second token; the first stays blank.
    filter_partial = Mock(return_value=[
        DependencyParse(dependencyList=[
            Dependency(gov=0, dep=1, edgeType='edge_1/0'),
        ]),
    ])
    expected = [[(u'', u''), (u'1', u'edge_1/0')]]
    assert _get_conll_tags_for_tokenization(
        tokenization, filter_partial) == expected
    filter_partial.assert_called_with(sentinel.dpl)

    # One parse covering both tokens (gov=-1 marks the root attachment).
    filter_full = Mock(return_value=[
        DependencyParse(dependencyList=[
            Dependency(gov=-1, dep=0, edgeType='edge_0/0'),
            Dependency(gov=0, dep=1, edgeType='edge_1/0'),
        ]),
    ])
    expected = [[(u'0', u'edge_0/0'), (u'1', u'edge_1/0')]]
    assert _get_conll_tags_for_tokenization(
        tokenization, filter_full) == expected
    filter_full.assert_called_with(sentinel.dpl)

    # Two complete parses: one tag list emitted per parse, in order.
    filter_pair = Mock(return_value=[
        DependencyParse(dependencyList=[
            Dependency(gov=-1, dep=0, edgeType='edge_0/0'),
            Dependency(gov=0, dep=1, edgeType='edge_1/0'),
        ]),
        DependencyParse(dependencyList=[
            Dependency(gov=-1, dep=0, edgeType='edge_0/1'),
            Dependency(gov=0, dep=1, edgeType='edge_1/1'),
        ]),
    ])
    expected = [
        [(u'0', u'edge_0/0'), (u'1', u'edge_1/0')],
        [(u'0', u'edge_0/1'), (u'1', u'edge_1/1')],
    ]
    assert _get_conll_tags_for_tokenization(
        tokenization, filter_pair) == expected
    filter_pair.assert_called_with(sentinel.dpl)
コード例 #4
0
def test_get_conll_tags_zero_tokens_implicit_filter():
    """With zero tokens, each dependency parse yields an empty tag list."""
    tokenization = Tokenization(
        tokenList=TokenList(tokenList=[]),
        dependencyParseList=[DependencyParse(dependencyList=[])],
    )

    result = _get_conll_tags_for_tokenization(tokenization)
    assert result == [[]]
コード例 #5
0
def test_get_conll_tags_one_token():
    """Tag extraction over a single-token tokenization, one case per filter."""
    tokenization = Tokenization(
        tokenList=TokenList(tokenList=[
            Token(tokenIndex=0, text='t0'),
        ]),
        dependencyParseList=sentinel.dpl,
    )

    # No parses selected: no tag lists.
    filter_none = Mock(return_value=[])
    assert [] == _get_conll_tags_for_tokenization(tokenization, filter_none)
    filter_none.assert_called_with(sentinel.dpl)

    # One dependency-free parse: the lone token gets a blank pair.
    filter_empty_parse = Mock(return_value=[
        DependencyParse(dependencyList=[]),
    ])
    expected = [[(u'', u'')]]
    assert _get_conll_tags_for_tokenization(
        tokenization, filter_empty_parse) == expected
    filter_empty_parse.assert_called_with(sentinel.dpl)

    # One parse attaching the token to the root (gov=-1).
    filter_single = Mock(return_value=[
        DependencyParse(dependencyList=[
            Dependency(gov=-1, dep=0, edgeType='edge_0/0'),
        ]),
    ])
    expected = [[(u'0', u'edge_0/0')]]
    assert _get_conll_tags_for_tokenization(
        tokenization, filter_single) == expected
    filter_single.assert_called_with(sentinel.dpl)

    # Two parses: one tag list each, in filter order.
    filter_pair = Mock(return_value=[
        DependencyParse(dependencyList=[
            Dependency(gov=-1, dep=0, edgeType='edge_0/0'),
        ]),
        DependencyParse(dependencyList=[
            Dependency(gov=-1, dep=0, edgeType='edge_0/1'),
        ]),
    ])
    expected = [
        [(u'0', u'edge_0/0')],
        [(u'0', u'edge_0/1')],
    ]
    assert _get_conll_tags_for_tokenization(
        tokenization, filter_pair) == expected
    filter_pair.assert_called_with(sentinel.dpl)
コード例 #6
0
def test_get_conll_tags_zero_tokens_implicit_filter():
    """Default filter over an empty token list gives one empty tag list."""
    empty_parse = DependencyParse(dependencyList=[])
    tokenization = Tokenization(tokenList=TokenList(tokenList=[]),
                                dependencyParseList=[empty_parse])

    assert [[]] == _get_conll_tags_for_tokenization(tokenization)
コード例 #7
0
def test_get_conll_tags_zero_tokens():
    """An explicit filter yielding one empty parse gives one empty tag list."""
    tokenization = Tokenization(
        tokenList=TokenList(tokenList=[]),
        dependencyParseList=sentinel.dpl,
    )

    single_empty_parse = Mock(return_value=[
        DependencyParse(dependencyList=[]),
    ])
    result = _get_conll_tags_for_tokenization(tokenization, single_empty_parse)
    assert result == [[]]
    single_empty_parse.assert_called_with(sentinel.dpl)
コード例 #8
0
def test_get_conll_tags_zero_tokens():
    """Zero tokens plus one selected (empty) parse -> one empty tag list."""
    tokenization = Tokenization(
        tokenList=TokenList(tokenList=[]),
        dependencyParseList=sentinel.dpl,
    )

    parse_filter = Mock(return_value=[DependencyParse(dependencyList=[])])
    assert [[]] == _get_conll_tags_for_tokenization(tokenization,
                                                    parse_filter)
    parse_filter.assert_called_with(sentinel.dpl)
コード例 #9
0
def test_get_conll_tags_one_token_implicit_filter():
    """Default filter maps a one-token parse to a single (head, edge) pair."""
    tokenization = Tokenization(
        tokenList=TokenList(tokenList=[Token(tokenIndex=0, text='t0')]),
        dependencyParseList=[
            DependencyParse(dependencyList=[
                Dependency(gov=-1, dep=0, edgeType='edge_0/0'),
            ]),
        ],
    )

    expected = [[(u'0', u'edge_0/0')]]
    assert _get_conll_tags_for_tokenization(tokenization) == expected
コード例 #10
0
def test_get_conll_tags_one_token_implicit_filter():
    """One root-attached token, implicit filter: one tag pair comes back."""
    root_dep = Dependency(gov=-1, dep=0, edgeType='edge_0/0')
    tokenization = Tokenization(
        tokenList=TokenList(tokenList=[
            Token(tokenIndex=0, text='t0'),
        ]),
        dependencyParseList=[DependencyParse(dependencyList=[root_dep])],
    )

    assert [[(u'0', u'edge_0/0')]] == \
        _get_conll_tags_for_tokenization(tokenization)