def test_class_tokens_are_put_in_class_block():
    """A class sandwiched between two functions gets exactly its own tokens."""
    given = tokens_from_string('''\
def a_func(thing):
    pass

class ImportantClass():
    """I'm parsed correctly"""
    def __init__(self):
        pass


def another_func():
    pass
''')
    expected_tokens = _generate_node_tokens_from_string('''\
## delete from test ##
## delete from test ##
## delete from test ##
class ImportantClass():
    """I'm parsed correctly"""
## delete from test ##
## delete from test ##
''')
    module = parse(given)
    # Everything except the trailing DEDENT must match the generated expectation.
    assert module.children[1].tokens[:-1] == expected_tokens[:-1]
    # The DEDENT (last token) looks different because of the way the expected
    # tokens are generated, so pin it against the real source positions here.
    assert module.children[1].tokens[-1] == TokenInfo(type=DEDENT, string='', start=(10, 0), end=(10, 0), line='def another_func():\n')
def test_module_tokens_are_put_in_root_block(tokens_with_normal_python):
    """Only module-level tokens land on the root node; function-body tokens do not."""
    module = parse(tokens_with_normal_python)
    expected_module_tokens = [
        TokenInfo(type=1, string='from', start=(1, 0), end=(1, 4), line='from somewhere import rainbow\n'),
        TokenInfo(type=1, string='somewhere', start=(1, 5), end=(1, 14), line='from somewhere import rainbow\n'),
        TokenInfo(type=1, string='import', start=(1, 15), end=(1, 21), line='from somewhere import rainbow\n'),
        TokenInfo(type=1, string='rainbow', start=(1, 22), end=(1, 29), line='from somewhere import rainbow\n'),
        TokenInfo(type=4, string='\n', start=(1, 29), end=(1, 30), line='from somewhere import rainbow\n'),
        TokenInfo(type=1, string='friends', start=(2, 0), end=(2, 7), line="friends = ['dog']\n"),
        TokenInfo(type=52, string='=', start=(2, 8), end=(2, 9), line="friends = ['dog']\n"),
        TokenInfo(type=52, string='[', start=(2, 10), end=(2, 11), line="friends = ['dog']\n"),
        TokenInfo(type=3, string="'dog'", start=(2, 11), end=(2, 16), line="friends = ['dog']\n"),
        TokenInfo(type=52, string=']', start=(2, 16), end=(2, 17), line="friends = ['dog']\n"),
        TokenInfo(type=4, string='\n', start=(2, 17), end=(2, 18), line="friends = ['dog']\n"),
        TokenInfo(type=55, string='\n', start=(3, 0), end=(3, 1), line='\n'),
        # Lines 4-8 belong to the function block, so the root skips to line 9.
        TokenInfo(type=1, string='destination', start=(9, 0), end=(9, 11), line='destination = a_journey(friends)\n'),
        TokenInfo(type=52, string='=', start=(9, 12), end=(9, 13), line='destination = a_journey(friends)\n'),
        TokenInfo(type=1, string='a_journey', start=(9, 14), end=(9, 23), line='destination = a_journey(friends)\n'),
        TokenInfo(type=52, string='(', start=(9, 23), end=(9, 24), line='destination = a_journey(friends)\n'),
        TokenInfo(type=1, string='friends', start=(9, 24), end=(9, 31), line='destination = a_journey(friends)\n'),
        TokenInfo(type=52, string=')', start=(9, 31), end=(9, 32), line='destination = a_journey(friends)\n'),
        TokenInfo(type=4, string='\n', start=(9, 32), end=(9, 33), line='destination = a_journey(friends)\n'),
        TokenInfo(type=55, string='\n', start=(10, 0), end=(10, 1), line='\n'),
        TokenInfo(type=0, string='', start=(11, 0), end=(11, 0), line=''),
    ]
    assert module.tokens == expected_module_tokens
def test_module_tokens_are_put_in_root_node():
    """Function-body tokens are excluded from the module (root) node."""
    given = tokens_from_string('''\
from somewhere import rainbow
friends = ['dog']

def a_journey(friends):
    import yellow.brick
    return 'home'

destination = a_journey(friends)
''')
    expected_tokens = _generate_tokens_from_string('''\
from somewhere import rainbow
friends = ['dog']
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
destination = a_journey(friends)
''')
    module = parse(given)
    assert module.tokens == expected_tokens
def test_bdd_blocks_are_parsed_correctly(tokens_with_bdd_blocks):
    """Each section of the first function is parsed as its own BDD block."""
    bag_of_bones = parse(tokens_with_bdd_blocks)
    function_node = bag_of_bones.children[0]
    assert function_node.block_type == FUNCTION
    assert len(function_node.children) == 3
    for bdd_child in function_node.children:
        assert bdd_child.block_type == BDD_BLOCK
def _run_bones_tests(file):
    """Tokenize *file*, drop mutation-suppressed blocks, and exec what remains.

    Pipeline: tokenize -> parse into a bones tree -> suppress mutations ->
    flatten back to tokens -> untokenize -> compile and execute.
    """
    token_stream = generate_tokens(file.readline)
    tree_root = parse(token_stream)
    healthy_tree = suppress_mutations(tree_root)
    surviving_tokens = bones_tree.flatten(healthy_tree)
    source_text = untokenize(surviving_tokens)
    # NOTE(review): exec of reconstructed source is intentional here — this is
    # the mutation-test runner; input is the user's own test file.
    exec(compile(source_text, '', 'exec'))
def test_when_nested_indents_then_blocks_are_parsed_correctly(tokens_with_nested_indents):
    """Nested indentation still yields three top-level blocks in source order."""
    bag_of_bones = parse(tokens_with_nested_indents)
    assert len(bag_of_bones.children) == 3
    assert len(bag_of_bones.children[1].children) == 1
    observed_types = [child.block_type for child in bag_of_bones.children]
    assert observed_types == [FUNCTION, CLASS, FUNCTION]
def main():
    """CLI entry point: tokenize FILE, build the bones tree, suppress mutations.

    Reads the file named by the ``<FILE>`` docopt argument.
    """
    args = docopt.docopt(__doc__)
    file_name = args['<FILE>']
    # Bug fix: the original opened the file with a bare open() and never closed
    # it, leaking the handle (especially if parse() raised). generate_tokens()
    # is lazy, so parsing must happen while the file is still open.
    with open(file_name) as f:
        tokens = generate_tokens(f.readline)
        bag_of_bones = parse(tokens)
    # Operates on the already-built tree; no file access needed past this point.
    suppress_mutations(bag_of_bones)
def test_when_nested_indents_then_blocks_are_parsed_correctly(
        tokens_with_nested_indents):
    """Nested indentation still produces function/class/function at top level."""
    root = parse(tokens_with_nested_indents)
    assert len(root.children) == 3
    first, middle, last = root.children
    assert len(middle.children) == 1
    assert first.block_type == FUNCTION
    assert middle.block_type == CLASS
    assert last.block_type == FUNCTION
def test_function_tokens_are_put_in_function_blocks(tokens_with_2_normal_functions):
    """Each function node owns exactly its own tokens, including its DEDENT."""
    module = parse(tokens_with_2_normal_functions)
    expected_function_1_tokens = [
        TokenInfo(type=1, string='def', start=(4, 0), end=(4, 3), line='def new_test(thing):\n'),
        TokenInfo(type=1, string='new_test', start=(4, 4), end=(4, 12), line='def new_test(thing):\n'),
        TokenInfo(type=52, string='(', start=(4, 12), end=(4, 13), line='def new_test(thing):\n'),
        TokenInfo(type=1, string='thing', start=(4, 13), end=(4, 18), line='def new_test(thing):\n'),
        TokenInfo(type=52, string=')', start=(4, 18), end=(4, 19), line='def new_test(thing):\n'),
        TokenInfo(type=52, string=':', start=(4, 19), end=(4, 20), line='def new_test(thing):\n'),
        TokenInfo(type=4, string='\n', start=(4, 20), end=(4, 21), line='def new_test(thing):\n'),
        TokenInfo(type=5, string='    ', start=(5, 0), end=(5, 4), line='    from special_date_module import is_new\n'),
        TokenInfo(type=1, string='from', start=(5, 4), end=(5, 8), line='    from special_date_module import is_new\n'),
        TokenInfo(type=1, string='special_date_module', start=(5, 9), end=(5, 28), line='    from special_date_module import is_new\n'),
        TokenInfo(type=1, string='import', start=(5, 29), end=(5, 35), line='    from special_date_module import is_new\n'),
        TokenInfo(type=1, string='is_new', start=(5, 36), end=(5, 42), line='    from special_date_module import is_new\n'),
        TokenInfo(type=4, string='\n', start=(5, 42), end=(5, 43), line='    from special_date_module import is_new\n'),
        TokenInfo(type=1, string='return', start=(6, 4), end=(6, 10), line='    return is_new(thing)\n'),
        TokenInfo(type=1, string='is_new', start=(6, 11), end=(6, 17), line='    return is_new(thing)\n'),
        TokenInfo(type=52, string='(', start=(6, 17), end=(6, 18), line='    return is_new(thing)\n'),
        TokenInfo(type=1, string='thing', start=(6, 18), end=(6, 23), line='    return is_new(thing)\n'),
        TokenInfo(type=52, string=')', start=(6, 23), end=(6, 24), line='    return is_new(thing)\n'),
        TokenInfo(type=4, string='\n', start=(6, 24), end=(6, 25), line='    return is_new(thing)\n'),
        TokenInfo(type=55, string='\n', start=(7, 0), end=(7, 1), line='\n'),
        TokenInfo(type=6, string='', start=(8, 0), end=(8, 0), line='class SneakyClass():\n'),
    ]
    expected_function_2_tokens = [
        TokenInfo(type=1, string='def', start=(12, 0), end=(12, 3), line='def old_test(thing):\n'),
        TokenInfo(type=1, string='old_test', start=(12, 4), end=(12, 12), line='def old_test(thing):\n'),
        TokenInfo(type=52, string='(', start=(12, 12), end=(12, 13), line='def old_test(thing):\n'),
        TokenInfo(type=1, string='thing', start=(12, 13), end=(12, 18), line='def old_test(thing):\n'),
        TokenInfo(type=52, string=')', start=(12, 18), end=(12, 19), line='def old_test(thing):\n'),
        TokenInfo(type=52, string=':', start=(12, 19), end=(12, 20), line='def old_test(thing):\n'),
        TokenInfo(type=4, string='\n', start=(12, 20), end=(12, 21), line='def old_test(thing):\n'),
        TokenInfo(type=5, string='    ', start=(13, 0), end=(13, 4), line='    from special_date_module import is_old\n'),
        TokenInfo(type=1, string='from', start=(13, 4), end=(13, 8), line='    from special_date_module import is_old\n'),
        TokenInfo(type=1, string='special_date_module', start=(13, 9), end=(13, 28), line='    from special_date_module import is_old\n'),
        TokenInfo(type=1, string='import', start=(13, 29), end=(13, 35), line='    from special_date_module import is_old\n'),
        TokenInfo(type=1, string='is_old', start=(13, 36), end=(13, 42), line='    from special_date_module import is_old\n'),
        TokenInfo(type=4, string='\n', start=(13, 42), end=(13, 43), line='    from special_date_module import is_old\n'),
        TokenInfo(type=1, string='return', start=(14, 4), end=(14, 10), line='    return is_old(thing)\n'),
        TokenInfo(type=1, string='is_old', start=(14, 11), end=(14, 17), line='    return is_old(thing)\n'),
        TokenInfo(type=52, string='(', start=(14, 17), end=(14, 18), line='    return is_old(thing)\n'),
        TokenInfo(type=1, string='thing', start=(14, 18), end=(14, 23), line='    return is_old(thing)\n'),
        TokenInfo(type=52, string=')', start=(14, 23), end=(14, 24), line='    return is_old(thing)\n'),
        TokenInfo(type=4, string='\n', start=(14, 24), end=(14, 25), line='    return is_old(thing)\n'),
        TokenInfo(type=55, string='\n', start=(15, 0), end=(15, 1), line='\n'),
        TokenInfo(type=55, string='\n', start=(16, 0), end=(16, 1), line='\n'),
        TokenInfo(type=6, string='', start=(17, 0), end=(17, 0), line='good = new_test(thing) and not old_test(thing)\n'),
    ]
    assert module.children[0].tokens == expected_function_1_tokens
    assert module.children[2].tokens == expected_function_2_tokens
def test_root_tokens_are_parsed_correctly(tokens_with_3_classes):
    """Top-level blocks come out in source order with their nested children."""
    module = parse(tokens_with_3_classes)
    assert len(module.children) == 5
    # Nested-child counts of the class blocks.
    assert len(module.children[1].children) == 1
    assert len(module.children[2].children) == 2
    assert len(module.children[4].children) == 1
    # Block types in source order.
    observed = [child.block_type for child in module.children]
    assert observed == [FUNCTION, CLASS, CLASS, FUNCTION, CLASS]
def test_class_tokens_are_put_in_class_block(tokens_with_a_normal_python_class):
    """The class node owns its header, docstring, and closing DEDENT tokens."""
    module = parse(tokens_with_a_normal_python_class)
    expected_class_tokens = [
        TokenInfo(type=1, string='class', start=(4, 0), end=(4, 5), line='class ImportantClass():\n'),
        TokenInfo(type=1, string='ImportantClass', start=(4, 6), end=(4, 20), line='class ImportantClass():\n'),
        TokenInfo(type=52, string='(', start=(4, 20), end=(4, 21), line='class ImportantClass():\n'),
        TokenInfo(type=52, string=')', start=(4, 21), end=(4, 22), line='class ImportantClass():\n'),
        TokenInfo(type=52, string=':', start=(4, 22), end=(4, 23), line='class ImportantClass():\n'),
        TokenInfo(type=4, string='\n', start=(4, 23), end=(4, 24), line='class ImportantClass():\n'),
        TokenInfo(type=5, string='    ', start=(5, 0), end=(5, 4), line='    """I\'m parsed correctly"""\n'),
        TokenInfo(type=3, string='"""I\'m parsed correctly"""', start=(5, 4), end=(5, 30), line='    """I\'m parsed correctly"""\n'),
        TokenInfo(type=4, string='\n', start=(5, 30), end=(5, 31), line='    """I\'m parsed correctly"""\n'),
        TokenInfo(type=55, string='\n', start=(6, 0), end=(6, 1), line='\n'),
        TokenInfo(type=6, string='', start=(10, 0), end=(10, 0), line='def another_func():\n'),
    ]
    assert module.children[1].tokens == expected_class_tokens
def test_class_tokens_are_put_in_class_block():
    """Only the class's own tokens end up on its node, not its neighbours'."""
    given = tokens_from_string('''\
def a_func(thing):
    pass

class ImportantClass():
    """I'm parsed correctly"""
    def __init__(self):
        pass


def another_func():
    pass
''')
    expected_tokens = _generate_node_tokens_from_string('''\
## delete from test ##
## delete from test ##
## delete from test ##
class ImportantClass():
    """I'm parsed correctly"""
## delete from test ##
## delete from test ##
''')
    module = parse(given)
    assert module.children[1].tokens[:-1] == expected_tokens[:-1]
    # The dedent token (the last one) looks different because of the way the
    # expected tokens are generated, so check it against real positions instead.
    assert module.children[1].tokens[-1] == TokenInfo(
        type=DEDENT, string='', start=(10, 0), end=(10, 0),
        line='def another_func():\n')
def test_function_tokens_are_put_in_function_blocks():
    """Function nodes own their tokens; interleaved module/class code stays out."""
    # given
    given = tokens_from_string('''\
from newer import better
old = 'bad'

def new_test(thing):
    from special_date_module import is_new
    return is_new(thing)

class SneakyClass():
    """just making sure nothing here show up where it shouldn't"""
    pass

def old_test(thing):
    from special_date_module import is_old
    return is_old(thing)

good = new_test(thing) and not old_test(thing)
''')
    expected_function_1_tokens = _generate_node_tokens_from_string('''\
## delete from test ##
## delete from test ##
## delete from test ##
def new_test(thing):
    from special_date_module import is_new
    return is_new(thing)
''')
    func_block_1 = _build_func_block(expected_function_1_tokens)
    expected_function_2_tokens = _generate_node_tokens_from_string('''\
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
def old_test(thing):
    from special_date_module import is_old
    return is_old(thing)
''')
    func_block_2 = _build_func_block(expected_function_2_tokens)
    # when
    module = parse(given)
    # then
    assert module.children[0].tokens[:-1] == func_block_1.tokens[:-1]
    # The dedent token (the last token) looks different because of the way the
    # expected tokens are generated, so it is compared separately.
    assert module.children[0].tokens[-1] == TokenInfo(type=DEDENT, string='', start=(8, 0), end=(8, 0), line='class SneakyClass():\n')
    assert module.children[2].tokens[:-1] == func_block_2.tokens[:-1]
    assert module.children[2].tokens[-1] == TokenInfo(type=DEDENT, string='', start=(16, 0), end=(16, 0), line='good = new_test(thing) and not old_test(thing)\n')
def test_function_tokens_are_put_in_function_blocks():
    """Each function node gets exactly its own tokens up to its DEDENT."""
    # given
    given = tokens_from_string('''\
from newer import better
old = 'bad'

def new_test(thing):
    from special_date_module import is_new
    return is_new(thing)

class SneakyClass():
    """just making sure nothing here show up where it shouldn't"""
    pass

def old_test(thing):
    from special_date_module import is_old
    return is_old(thing)

good = new_test(thing) and not old_test(thing)
''')
    expected_function_1_tokens = _generate_node_tokens_from_string('''\
## delete from test ##
## delete from test ##
## delete from test ##
def new_test(thing):
    from special_date_module import is_new
    return is_new(thing)
''')
    func_block_1 = _build_func_block(expected_function_1_tokens)
    expected_function_2_tokens = _generate_node_tokens_from_string('''\
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
## delete from test ##
def old_test(thing):
    from special_date_module import is_old
    return is_old(thing)
''')
    func_block_2 = _build_func_block(expected_function_2_tokens)
    # when
    module = parse(given)
    # then
    assert module.children[0].tokens[:-1] == func_block_1.tokens[:-1]
    # The dedent token (the last token) looks different because of the way the
    # expected tokens are generated, so pin it to real source positions here.
    assert module.children[0].tokens[-1] == TokenInfo(
        type=DEDENT, string='', start=(8, 0), end=(8, 0),
        line='class SneakyClass():\n')
    assert module.children[2].tokens[:-1] == func_block_2.tokens[:-1]
    assert module.children[2].tokens[-1] == TokenInfo(
        type=DEDENT, string='', start=(16, 0), end=(16, 0),
        line='good = new_test(thing) and not old_test(thing)\n')