Example #1
0
 def test_odd_quotes(self):
     """An unbalanced (odd) number of single quotes must raise an error."""
     # Both inputs contain an unterminated quoted label; tokenizing
     # either one should fail.
     for newick in ("((h_ ,'p)h p,g()[],:_)hpg;",
                    "((h_ ,'p')h p,'g()[]',:_')hpg;"):
         tok = NewickTokenizer(StringIO(newick))
         self.assertRaises(Exception, tok.tokens)
Example #2
0
    def _do_test(self, content, expected):
        """Part of the testing harness.

        Writes `content`, then parses and compares to `expected`.

        Tokenizes `content` three ways (stream, keyword string, and a
        scratch file) and asserts each token list equals `expected`.
        The scratch file is removed afterwards on a best-effort basis.
        """
        self.assertEqual(list(NewickTokenizer(StringIO(content))), expected)
        self.assertEqual(list(NewickTokenizer(newick=content)), expected)
        fp = path_map.next_unique_scratch_filepath('tok_test')
        try:
            write_to_filepath(content, fp)
            self.assertEqual(list(NewickTokenizer(filepath=fp)), expected)
        finally:
            try:
                os.unlink(fp)
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; unlink failures are OSError.
            except OSError:  # pragma: no cover
                pass
Example #3
0
    def _do_test(self, content, expected):
        """Part of the testing harness.

        Parses `content` and compares the resulting events to `expected`.
        """
        # Pull-style iteration over the event factory.
        tokenizer = NewickTokenizer(stream=StringIO(content))
        pulled = [deepcopy(ev) for ev in NewickEventFactory(tokenizer=tokenizer)]
        self.assertEqual(pulled, expected)

        # Push-style: the factory invokes the handler for each event.
        pushed = []

        def _collect(event):
            pushed.append(deepcopy(event))

        NewickEventFactory(newick=content, event_handler=_collect)
        self.assertEqual(pushed, expected)
Example #4
0
 def test_open_closed(self):
     """An empty pair of parentheses '()' must be rejected."""
     with self.assertRaises(ValueError):
         NewickTokenizer(newick='(a,(),(d,e));').tokens()
Example #5
0
 def test_label(self):
     """An unquoted label containing a newline must be rejected."""
     with self.assertRaises(ValueError):
         NewickTokenizer(newick="(a\n'b',(b,c),(d,e));").tokens()
Example #6
0
 def test_unexpected_comma(self):
     """Consecutive commas in a Newick string must be rejected."""
     with self.assertRaises(ValueError):
         NewickTokenizer(newick='(a,(b,c),,(d,e));').tokens()
Example #7
0
 def test_comma_bef_semicolon(self):
     """A comma immediately before the terminating ';' must be rejected."""
     with self.assertRaises(ValueError):
         NewickTokenizer(newick='(a,(b,c),(d,e)),;').tokens()
Example #8
0
 def test_unclosed_comment(self):
     """An unterminated '[' comment must be rejected."""
     with self.assertRaises(ValueError):
         NewickTokenizer(newick='(a,(b,c),[(d,e));').tokens()
Example #9
0
 def test_sans_comma(self):
     """Adjacent clades with the separating comma omitted must be rejected."""
     with self.assertRaises(ValueError):
         NewickTokenizer(newick='(a,(b,c)(d,e));').tokens()
Example #10
0
 def test_peek_none(self):
     """After the token stream is exhausted, _peek yields None."""
     tokenizer = NewickTokenizer(newick='(a,(b,c));')
     tokenizer.tokens()
     self.assertIsNone(tokenizer._peek())
Example #11
0
 def test_extra_closed(self):
     """An extra closing parenthesis must be rejected."""
     with self.assertRaises(ValueError):
         NewickTokenizer(newick='(a,(b,c)));').tokens()
Example #12
0
 def test_unclosed(self):
     """A Newick string with an unclosed '(' must be rejected."""
     with self.assertRaises(ValueError):
         NewickTokenizer(newick='(a,(b,c)').tokens()
Example #13
0
 def test_extra_suffix(self):
     """Trailing text after the terminating ';' must be rejected."""
     with self.assertRaises(ValueError):
         NewickTokenizer(newick='(a,(b,c));suffix').tokens()