Example 1
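A pytest-style test showing that context_to_paragraph leaves the token stream unchanged when it already contains a TokenList.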
def test_context_to_paragraph_exceptions2():
    tokenized = [
        tokens.Verb(tokens.Verb.PUT, active=True),
        tokens.Context(['2']),
        tokens.TokenList([tokens.Paragraph.make(part='3')])
    ]
    assert tokenized == amdparser.context_to_paragraph(tokenized)
Example 3
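A unittest-style test covering both exception cases: when the stream already holds a bare Paragraph or a TokenList, context_to_paragraph returns its input unchanged.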
    def test_context_to_paragraph_exceptions(self):
        tokenized = [
            tokens.Verb(tokens.Verb.PUT, active=True),
            tokens.Context(['2']),
            tokens.Paragraph(part='3')
        ]
        converted = amdparser.context_to_paragraph(tokenized)
        self.assertEqual(tokenized, converted)

        tokenized = [
            tokens.Verb(tokens.Verb.PUT, active=True),
            tokens.Context(['2']),
            tokens.TokenList([tokens.Paragraph(part='3')])
        ]
        converted = amdparser.context_to_paragraph(tokenized)
        self.assertEqual(tokenized, converted)
Example 5
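A pytest-style test of the normal conversion: Context tokens that follow the verb become Paragraph tokens, while the context marked certain=True is left untouched.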
def test_context_to_paragraph():
    tokenized = [
        tokens.Context(['1']),
        tokens.Verb(tokens.Verb.PUT, active=True),
        tokens.Context(['2']),
        tokens.Context(['3'], certain=True),
        tokens.Context(['4'])
    ]
    assert amdparser.context_to_paragraph(tokenized) == [
        tokens.Context(['1']),
        tokens.Verb(tokens.Verb.PUT, active=True),
        tokens.Paragraph.make(part='2'),
        tokens.Context(['3'], certain=True),
        tokens.Paragraph.make(part='4')
    ]
Example 7
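The same conversion test written in unittest style, asserting that uncertain Context tokens after the verb are turned into Paragraph tokens.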
    def test_context_to_paragraph(self):
        tokenized = [
            tokens.Context(['1']),
            tokens.Verb(tokens.Verb.PUT, active=True),
            tokens.Context(['2']),
            tokens.Context(['3'], certain=True),
            tokens.Context(['4'])
        ]
        converted = amdparser.context_to_paragraph(tokenized)
        self.assertEqual(converted, [
            tokens.Context(['1']),
            tokens.Verb(tokens.Verb.PUT, active=True),
            tokens.Paragraph(part='2'),
            tokens.Context(['3'], certain=True),
            tokens.Paragraph(part='4')
        ])
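Taken together, the examples above suggest the following behaviour. The sketch below is only an illustration of that behaviour, not the actual amdparser implementation; the attribute names label and certain (and the default certain=False) are assumptions read off the constructors used in the tests.

def context_to_paragraph_sketch(tokenized):
    # `tokens` is the same module the tests above import (from the regparser project).
    # Leave the stream untouched if it already carries paragraph-level tokens
    # (the "exceptions" tests).
    if any(isinstance(t, (tokens.Paragraph, tokens.TokenList)) for t in tokenized):
        return tokenized

    converted = []
    seen_verb = False
    for token in tokenized:
        if isinstance(token, tokens.Verb):
            seen_verb = True
            converted.append(token)
        elif (seen_verb and isinstance(token, tokens.Context)
                and not getattr(token, 'certain', False)):
            # Assumption: Context keeps its pieces in a `label` list, so the
            # first element is the part number used for the new Paragraph.
            converted.append(tokens.Paragraph.make(part=token.label[0]))
        else:
            converted.append(token)
    return converted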