def test_map_multiple_elements_inline():
    """An inline two-item map: only the first item carries a comma+space tail."""
    first_item = ast.MapItem(
        head=(),
        key=ast.Bool(val=True, src='true'),
        inner=(ast.Colon(':'), ast.Space(' ')),
        val=ast.Bool(val=False, src='false'),
        tail=(ast.Comma(','), ast.Space(' ')),
    )
    second_item = ast.MapItem(
        head=(),
        key=ast.Bool(val=False, src='false'),
        inner=(ast.Colon(':'), ast.Space(' ')),
        val=ast.Bool(val=True, src='true'),
        tail=(),  # last item: no trailing comma in the source text
    )
    expected = ast.Doc(
        head=(),
        val=ast.Map(
            head=(ast.MapStart('{'),),
            items=(first_item, second_item),
            tail=(ast.MapEnd('}'),),
        ),
        tail=(),
    )
    assert parse('{true: false, false: true}') == expected
def test_bare_word_key_starts_with_other_token():
    """A key beginning with 'true' must lex as one BareWordKey, not a Bool."""
    expected = (
        ast.MapStart('{'),
        ast.BareWordKey('true_values', 'true_values'),
        ast.Colon(':'),
        ast.Space(' '),
        ast.ListStart('['),
        ast.ListEnd(']'),
        ast.MapEnd('}'),
        ast.EOF(''),
    )
    assert tokenize('{true_values: []}') == expected
def test_map_multiline_trivial():
    """An empty map split over two lines is multiline but not top-level style."""
    doc = parse('{\n}')
    empty_map = ast.Map(
        head=(ast.MapStart('{'), ast.NL('\n')),
        items=(),
        tail=(ast.MapEnd('}'),),
    )
    assert doc == ast.Doc(head=(), val=empty_map, tail=())
    assert doc.val.is_multiline
    assert not doc.val.is_top_level_style
def test_map_bare_word_key():
    """Bare-word keys may mix underscores, letters, digits, and hyphens."""
    item = ast.MapItem(
        head=(),
        key=ast.BareWordKey('_Key-str1ng', '_Key-str1ng'),
        inner=(ast.Colon(':'), ast.Space(' ')),
        val=ast.String(val='value', src="'value'"),
        tail=(),
    )
    expected = ast.Doc(
        head=(),
        val=ast.Map(
            head=(ast.MapStart('{'),),
            items=(item,),
            tail=(ast.MapEnd('}'),),
        ),
        tail=(),
    )
    assert parse("{_Key-str1ng: 'value'}") == expected
def test_comment_at_start_of_multiline_json():
    """A comment after the opening brace attaches to the map's head tokens."""
    # NOTE(review): the expected head holds TWO Space(' ') tokens and the item
    # head holds an Indent, but the input literals below show only single
    # spaces — whitespace runs inside these strings may have been collapsed
    # during extraction; verify against the canonical source before editing.
    ret = parse('{ # bar\n' ' true: false,\n' '}')
    expected = ast.Doc(
        head=(),
        val=ast.Map(
            head=(
                ast.MapStart('{'),
                ast.Space(' '),
                ast.Space(' '),
                ast.Comment('# bar\n'),
            ),
            items=(ast.MapItem(
                head=(ast.Indent(' '), ),
                key=ast.Bool(val=True, src='true'),
                inner=(ast.Colon(':'), ast.Space(' ')),
                val=ast.Bool(val=False, src='false'),
                # comma + newline close the single multiline item
                tail=(ast.Comma(','), ast.NL('\n')),
            ), ),
            tail=(ast.MapEnd('}'), ),
        ),
        tail=(),
    )
    assert ret == expected
# NOTE(review): the lines below are the tail of an enclosing function whose
# `def` line is outside this view (presumably `_map_item_tokens`, given the
# reference further down) — it appears to emit tokens for one key/value pair
# as `key ':' ' ' value`; confirm against the full file.
    ret = []
    ret.extend(_to_tokens(k, settings, key=True))
    ret.extend((ast.Colon(':'), ast.Space(' ')))
    ret.extend(_to_tokens(v, settings))
    return ret


# Per-container configuration: the open/close delimiter tokens, the function
# that tokenizes one item, and how to turn the container into an iterable of
# items for that function.
ContainerSettings = collections.namedtuple(
    'ContainerSettings', ('start', 'end', 'item_func', 'to_iter'),
)

# Map serializer: '{'/'}' delimiters; items are (key, value) pairs fed to
# _map_item_tokens.
_map_tokens = functools.partial(
    _container,
    container_settings=ContainerSettings(
        start=ast.MapStart('{'),
        end=ast.MapEnd('}'),
        item_func=_map_item_tokens,
        to_iter=lambda m: tuple(m.items()),
    ),
)

# List serializer: '['/']' delimiters; each element is tokenized directly via
# _to_tokens.
_list_tokens = functools.partial(
    _container,
    container_settings=ContainerSettings(
        start=ast.ListStart('['),
        end=ast.ListEnd(']'),
        item_func=_to_tokens,
        to_iter=tuple,
    ),
)