Example #1
 def test_init_two_field(self):
     f1 = Field("name", "value", 2)
     f2 = Field("other name", "other value")
     paragraph = Paragraph(f1, f2)
     self.assertEqual(len(paragraph), 2)
     paragraph.add_field(Field("other name", "other value"))
     self.assertEqual(len(paragraph), 3)
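
This test exercises Field and Paragraph classes that are not shown on this page; the later examples use the same API. The sketch below is only an assumption reconstructed from how the examples call them (a Field(name, value, line_number) constructor where the line number is optional, plus add_content(), and a Paragraph that takes fields in its constructor and supports add_field() and len()); the real classes are likely richer.

 class Field:
     """Assumed minimal field: a name, a value and an optional source
     line number."""

     def __init__(self, name, value, line_number=None):
         self.name = name
         self.value = value
         self.line_number = line_number

     def add_content(self, line):
         # Continuation lines are appended to the existing value.
         self.value += line


 class Paragraph:
     """Assumed minimal paragraph: an ordered collection of fields."""

     def __init__(self, *fields):
         self._fields = list(fields)

     def add_field(self, field):
         self._fields.append(field)

     def __len__(self):
         return len(self._fields)
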
Example #2
 def _tokenize_paragraph(self, txt, start_line_number):
     """Tokenize a paragraph. Return a Paragraph containing the fields
     with their respective values."""
     paragraph = Paragraph()
     current_field = None
     for line_number, line in enumerate(txt.splitlines(True)):
         if line.startswith(" "):
             # A line starting with a space continues the previous
             # field's value.
             if current_field:
                 current_field.add_content(line)
             else:
                 # Continuation line with no preceding field: keep it
                 # as a field with an empty name.
                 current_field = Field("",
                                       line,
                                       line_number + start_line_number + 1)
                 paragraph.add_field(current_field)
             continue
         # "Name: value" line; the value itself may contain colons, so
         # only the first one separates the name from the value.
         line_splited = line.split(":")
         field_name = line_splited[0]
         value = ":".join(line_splited[1:])
         current_field = Field(field_name,
                               value,
                               line_number + start_line_number + 1)
         paragraph.add_field(current_field)
     return paragraph
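
For reference, given an input like the one below, the tokenizer above would return a Paragraph equivalent to the one built by hand here (using the assumed Field/Paragraph sketch after Example #1): each "Name: value" line becomes a Field carrying its 1-based line number, and the space-prefixed line is folded into the preceding field via add_content().

 # Hand-built equivalent of _tokenize_paragraph(text, 0); the sample
 # text and field names are made up for illustration.
 text = ("Package: hello\n"
         "Description: a short summary\n"
         " folded continuation line\n")

 expected = Paragraph(Field("Package", " hello\n", 1))
 description = Field("Description", " a short summary\n", 2)
 description.add_content(" folded continuation line\n")
 expected.add_field(description)

 assert len(expected) == 2
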
Example #3
 def test_init_one_field(self):
     paragraph = Paragraph(Field("name", "value", 2))
     self.assertEqual(len(paragraph), 1)
     paragraph.add_field(Field("other name", "other value"))
     self.assertEqual(len(paragraph), 2)