def test_tokenFailed(self):
    """
    On failure, L{OMetaBase.rule_token} produces an error indicating the
    position where match failure occurred and the expected character.
    """
    data = "foozle"
    o = OMetaBase(data)
    try:
        o.rule_token('fog')
    # Python 3 `as` binding (the old `except X, e:` form is a SyntaxError
    # on Python 3).
    except _MaybeParseError as e:
        # 'fo' matches, so the failure position is index 2.
        self.assertEqual(e[0], 2)
        self.assertEqual(e[1], expected("token", "fog"))
    else:
        # Without this guard the test would pass vacuously if rule_token
        # silently succeeded.
        self.fail('_MaybeParseError not raised')
def test_tokenFailed(self):
    """
    On failure, L{OMetaBase.rule_token} produces an error indicating the
    position where match failure occurred and the expected character.
    """
    parser = OMetaBase("foozle")
    # assertRaises both demands that the exception is raised and captures
    # it for inspection, replacing the try/except/else/fail pattern.
    with self.assertRaises(_MaybeParseError) as captured:
        parser.rule_token('fog')
    failure = captured.exception
    # 'fo' matches, so the failure position is index 2.
    self.assertEqual(failure[0], 2)
    self.assertEqual(failure[1], expected("token", "fog"))
def test_token(self):
    """
    L{OMetaBase.rule_token} matches all the characters in the given string
    plus any preceding whitespace.
    """
    parser = OMetaBase(" foo bar")
    # Each token should be matched in turn, with the error object
    # reporting the input position reached after consuming the token
    # (leading whitespace included).
    for token, end in [("foo", 4), ("bar", 8)]:
        value, err = parser.rule_token(token)
        self.assertEqual(value, token)
        self.assertEqual(err[0], end)