def test_4(self):
    """Test that sequence_node_from_tokens handles a nested sequence."""
    tokens = [
        SequenceStart(),
        SequenceStart(),
        ScalarValue(data='value'),
        SequenceEnd(),
        SequenceEnd(),
    ]
    expected_node = SequenceNode(
        items=[SequenceNode(items=[ScalarNode(data='value')])])
    self.assertEqual(
        sequence_node_from_tokens(tokens=tokens, start=0),
        (expected_node, 5))
def test_2(self):
    """Test that sequence_node_from_tokens works when there are no items."""
    tokens = [SequenceStart(), SequenceEnd()]
    result = sequence_node_from_tokens(tokens=tokens, start=0)
    self.assertEqual(result, (SequenceNode(items=[]), 2))
def test_is_sequence(self):
    """Test that is_sequence is true for tokens that begin a sequence."""
    tokens = [
        SequenceStart(),
        ScalarValue(data='value'),
        SequenceEnd(),
    ]
    self.assertTrue(is_sequence(tokens=tokens, start=0))
def test_1(self):
    """Test that sequence_node_from_tokens works for the simplest case."""
    tokens = [
        SequenceStart(),
        ScalarValue(data='value'),
        SequenceEnd(),
    ]
    expected = (SequenceNode(items=[ScalarNode(data='value')]), 3)
    self.assertEqual(
        sequence_node_from_tokens(tokens=tokens, start=0), expected)
def test_3(self):
    """Test that sequence_node_from_tokens works with more than one item."""
    tokens = [
        SequenceStart(),
        ScalarValue(data='value'),
        Delimiter(),
        ScalarValue(data='value'),
        SequenceEnd(),
    ]
    expected_node = SequenceNode(
        items=[ScalarNode(data='value'), ScalarNode(data='value')])
    self.assertEqual(
        sequence_node_from_tokens(tokens=tokens, start=0),
        (expected_node, 5))
def test_3(self):
    """Test that a sequence is converted to the expected tokens."""
    expected = [
        SequenceStart(),
        ScalarValue(data='key'),
        Delimiter(),
        ScalarValue(data='value'),
        Delimiter(),
        ScalarValue(data='key2'),
        Delimiter(),
        ScalarValue(data='value'),
        SequenceEnd(),
    ]
    self.assertEqual(
        tokenize(string='["key", "value", "key2","value"]'), expected)
def test_5(self):
    """Test that sequence_node_from_tokens works when it contains a mapping."""
    tokens = [
        SequenceStart(),
        MappingStart(),
        ScalarValue(data='key'),
        Separator(),
        ScalarValue(data='value'),
        MappingEnd(),
        SequenceEnd(),
    ]
    expected_node = SequenceNode(items=[
        MappingNode(
            mapping={ScalarNode(data='key'): ScalarNode(data='value')}),
    ])
    self.assertEqual(
        sequence_node_from_tokens(tokens=tokens, start=0),
        (expected_node, 7))
def test_4(self):
    """Test that mapping_node_from_tokens works when the value is a sequence."""
    tokens = [
        MappingStart(),
        ScalarValue(data='key'),
        Separator(),
        SequenceStart(),
        ScalarValue(data='value'),
        SequenceEnd(),
        MappingEnd(),
    ]
    expected_node = MappingNode(mapping={
        ScalarNode(data='key'): SequenceNode(items=[ScalarNode(data='value')]),
    })
    self.assertEqual(
        mapping_node_from_tokens(tokens=tokens, start=0),
        (expected_node, 7))
def test_4(self):
    """Test that a string with surrounding whitespace between items is
    converted to its expected tokens.

    NOTE(review): the input literal originally appeared to span several
    lines; its exact internal whitespace could not be recovered from the
    collapsed source, so it is reproduced here exactly as seen.
    """
    expected = [
        SequenceStart(),
        ScalarValue(data='key'),
        Delimiter(),
        ScalarValue(data='value'),
        Delimiter(),
        ScalarValue(data='key2'),
        Delimiter(),
        ScalarValue(data='value'),
        SequenceEnd(),
    ]
    self.assertEqual(
        tokenize(string='''[ "key", "value", "key2", "value" ]'''),
        expected)