file_helper.remove('test_data/main')
file_helper.rmtree('test_data/test')
file_helper.remove('test_data/test2')

ctx.subpake('test_data/subpake/pakefile.py', 'clean')

file_helper.rmtree('test_data/subpake_copy')

file_helper.glob_remove_dirs('test_data/glob_and_pattern/delete_me_dir*')

file_helper.rmtree('test_data/directory_create_test')


if __name__ == '__main__':
    pake.run(pk, tasks=[print_define, all])
    exit(0)


class IntegrationTest(unittest.TestCase):
    def _check_outputs(self, exist=True):
        fun = self.assertTrue if exist else self.assertFalse

        fun(os.path.exists(os.path.join(script_dir, "test_data", "main")))

        fun(
            os.path.exists(
                os.path.join(script_dir, "test_data",
                             "directory_create_test")))

        fun(
@pk.task(build_image)
def create_volume(ctx):
    if on_windows():
        if not docker_volume_exists(WIN_VOLUME):
            ctx.call('docker', 'volume', 'create', WIN_VOLUME)


@pk.task(create_volume)
def shell(ctx):
    """Run an interactive shell inside the container."""

    if on_windows():
        pake.FileHelper().makedirs('artifacts')

    run_docker(enter_to_shell=True)


@pk.task(create_volume)
def build(ctx):
    """Build Firestorm Viewer inside the container."""

    if on_windows():
        pake.FileHelper().makedirs('artifacts')

    run_docker(enter_to_shell=False)


pake.run(pk, tasks=build)
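The helpers referenced above (on_windows, docker_volume_exists and run_docker) are defined elsewhere in the pakefile. A minimal sketch of what they might look like, using only the standard library and a hypothetical image name, could be:

import platform
import subprocess


def on_windows():
    # True when the host running the pakefile is Windows.
    return platform.system() == 'Windows'


def docker_volume_exists(name):
    # 'docker volume inspect' exits non-zero when the volume does not exist.
    result = subprocess.run(['docker', 'volume', 'inspect', name],
                            stdout=subprocess.DEVNULL,
                            stderr=subprocess.DEVNULL)
    return result.returncode == 0


def run_docker(enter_to_shell=False):
    # Launch the build container: an interactive TTY for the 'shell' task,
    # a plain run for 'build'. The 'firestorm-build' image name is an
    # assumption, not the project's actual image.
    args = ['docker', 'run', '--rm']
    if enter_to_shell:
        args += ['-it', 'firestorm-build', '/bin/bash']
    else:
        args += ['firestorm-build']
    subprocess.check_call(args)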
assert pk.get_define('TEST_EXPORT1') == [1, 'te"st', [3, 4, "test'test"]]

assert pk.get_define('TEST_EXPORT2') == {
    0: 1,
    1: 'te"st',
    2: [3, 4, "test'test"]
}

assert pk.get_define('TEST_EXPORT3') == {1, 'te"st', 3, 4, "test'test"}

assert pk.get_define('TEST_EXPORT4') == (1, 'te"st', [3, 4, "test'test"])

assert pk.get_define('TEST_EXPORT5') == ''


@pk.task(i="test.c", o="test.o")
def build(ctx):
    file_helper = pake.FileHelper(ctx)
    file_helper.touch(ctx.outputs[0])
    ctx.print(ctx.inputs[0])


@pk.task
def clean(ctx):
    for i in glob.glob("*.o"):
        ctx.print('Removing: {}'.format(i))
        os.unlink(i)


pake.run(pk)
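The TEST_EXPORT* defines asserted above would normally come from a parent script that exports them before running this script as a subpake. A hedged sketch of such a parent, using a hypothetical child file name, might look like this:

import pake

pk = pake.init()

# Exported defines are visible to any pakefile invoked via subpake;
# the values here simply mirror the assertions above.
pake.export('TEST_EXPORT1', [1, 'te"st', [3, 4, "test'test"]])
pake.export('TEST_EXPORT2', {0: 1, 1: 'te"st', 2: [3, 4, "test'test"]})
pake.export('TEST_EXPORT3', {1, 'te"st', 3, 4, "test'test"})
pake.export('TEST_EXPORT4', (1, 'te"st', [3, 4, "test'test"]))
pake.export('TEST_EXPORT5', '')


@pk.task
def run_child(ctx):
    # 'child_pakefile.py' is a hypothetical name for the script
    # containing the assertions above.
    ctx.subpake('child_pakefile.py')


pake.run(pk, tasks=run_child)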
@pk.task(i=pake.glob('src/*.c'),
         o=pake.pattern(os.path.join(obj_dir, '%' + obj_ext)))
def compile_c(ctx):
    file_helper = pake.FileHelper(ctx)
    file_helper.makedirs(obj_dir)

    compiler_args = ([compiler, cc_flags, i, '-o', o]
                     for i, o in ctx.outdated_pairs)

    sync_call = partial(ctx.call, collect_output=pk.max_jobs > 1)

    with ctx.multitask() as mt:
        list(mt.map(sync_call, compiler_args))


@pk.task(compile_asm, compile_c, o=exe_target)
def build_example(ctx):
    file_helper = pake.FileHelper(ctx)
    file_helper.makedirs(bin_dir)

    ctx.call(compiler, link_flags, ctx.dependency_outputs,
             asm_lib_path, '-o', exe_target)


@pk.task
def clean(ctx):
    file_helper = pake.FileHelper(ctx)
    file_helper.rmtree('bin')
    file_helper.rmtree('obj')


pake.run(pk, tasks=build_example)
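The compile_asm task and the module-level names used above (compiler, cc_flags, obj_dir, obj_ext, bin_dir, exe_target, link_flags, asm_lib_path) are defined earlier in the example pakefile. The placeholder values below only illustrate the kind of setup the excerpt assumes; they are not the project's actual settings:

import os
from functools import partial

import pake

pk = pake.init()

# Placeholder configuration; the real pakefile derives these from its
# own defines and directory layout.
compiler = pk.get_define('CC', 'gcc')
cc_flags = ['-c', '-O2']
link_flags = []

obj_dir = 'obj'
obj_ext = '.o'
bin_dir = 'bin'
exe_target = os.path.join(bin_dir, 'example')
asm_lib_path = os.path.join('..', '..', 'bin', 'libasm_io.a')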
def test_run(self):
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Arguments should still be parsed and the object available,
    # even with no arguments passed.
    self.assertTrue(pake.arguments.args_are_parsed())

    run_count = 0

    @pk.task
    def task_one(ctx):
        nonlocal run_count
        run_count += 1

    @pk.task
    def task_two(ctx):
        nonlocal run_count
        run_count += 1

    self.assertEqual(pk.task_count, 2)

    pake.run(pk, tasks=[task_one, 'task_two'])

    self.assertEqual(pk.task_count, 2)
    self.assertEqual(pk.run_count, run_count)

    with self.assertRaises(ValueError):
        # Because jobs < 1
        pake.run(pk, tasks=[task_one, 'task_two'], jobs=0)

    with self.assertRaises(ValueError):
        # Because jobs < 1
        pake.run(pk, tasks=[task_one, 'task_two'], jobs=-1)

    # ===========

    def test_run_helper(dry_run=False):
        nonlocal self

        pake.de_init(clear_conf=False)

        if dry_run:
            pk = pake.init(args=['--dry-run'])
        else:
            pk = pake.init()

        # No tasks defined
        self.assertEqual(pake.run(pk, call_exit=False),
                         returncodes.NO_TASKS_DEFINED)

        @pk.task
        def task_one():
            raise Exception()

        # No tasks specified
        self.assertEqual(pake.run(pk, call_exit=False),
                         returncodes.NO_TASKS_SPECIFIED)

        # Undefined task
        self.assertEqual(pake.run(pk, tasks='undefined', call_exit=False),
                         returncodes.UNDEFINED_TASK)

        if not dry_run:
            # Exception in task
            self.assertEqual(pake.run(pk, tasks='task_one', call_exit=False),
                             returncodes.TASK_EXCEPTION)

        @pk.task(i='IDontExist.nope', o='nada')
        def task_two():
            pass

        # Input file not found
        self.assertEqual(pake.run(pk, tasks='task_two', call_exit=False),
                         returncodes.TASK_INPUT_NOT_FOUND)

        @pk.task(i='IDontExist.nope')
        def task_three():
            pass

        # Missing output file
        self.assertEqual(pake.run(pk, tasks='task_three', call_exit=False),
                         returncodes.TASK_OUTPUT_MISSING)

        # ======== Cover Subpake and Call exception propagation

        @pk.task
        def task_four(ctx):
            ctx.subpake(os.path.join(script_dir, 'throw.py'))

        @pk.task
        def task_five(ctx):
            # Execute with the current interpreter.
            ctx.call(sys.executable, os.path.join(script_dir, 'throw.py'))

        if not dry_run:
            # Because 'throw.py' runs but throws an exception
            self.assertEqual(pake.run(pk, tasks='task_four', call_exit=False),
                             returncodes.SUBPAKE_EXCEPTION)

            # Same thing, except differentiated as a task subprocess exception
            self.assertEqual(pake.run(pk, tasks=task_five, call_exit=False),
                             returncodes.TASK_SUBPROCESS_EXCEPTION)

    test_run_helper()
    test_run_helper(True)
def test_show_header(self):
    # This test verifies that the pake.Pake.show_task_headers option
    # and the show_header parameter of pk.add_task and pk.task
    # are working correctly in conjunction with each other.

    with tempfile.TemporaryFile(mode='w+') as pk_stdout:
        pake.de_init(clear_conf=False)
        pk = pake.init(stdout=pk_stdout)

        self.assertTrue(pk.show_task_headers)

        @pk.task
        def test_task(ctx):
            # I will print a header by default
            pass

        pake.run(pk, tasks=test_task)

        self.assertGreater(
            pk_stdout.tell(), 0,
            msg='Task with show_header=None (default value) did not write '
                'a header when pk.show_task_headers=True.')

        pk_stdout.seek(0)

        # ============

        pake.de_init(clear_conf=False)
        pk = pake.init(stdout=pk_stdout)

        self.assertTrue(pk.show_task_headers)

        @pk.task(show_header=False)
        def test_task(ctx):
            # I will print nothing at all,
            # even if pk.show_task_headers is True
            pass

        pake.run(pk, tasks=test_task)

        self.assertEqual(
            pk_stdout.tell(), 0,
            msg='Task with show_header=False wrote a header to pake\'s '
                'output when pk.show_task_headers=True.')

        pk_stdout.seek(0)

        # ============

        pake.de_init(clear_conf=False)
        pk = pake.init(stdout=pk_stdout)

        pk.show_task_headers = False

        @pk.task
        def test_task(ctx):
            # I will print nothing at all,
            # because pk.show_task_headers is False
            # and it was not overridden with show_header=True
            pass

        pake.run(pk, tasks=test_task)

        self.assertEqual(
            pk_stdout.tell(), 0,
            msg='Task with show_header=None (default value) wrote a header '
                'to pake\'s output when pk.show_task_headers=False.')

        pk_stdout.seek(0)

        # ============

        pake.de_init(clear_conf=False)
        pk = pake.init(stdout=pk_stdout)

        pk.show_task_headers = False

        @pk.task(show_header=True)
        def test_task(ctx):
            # I will print a header regardless
            # of pk.show_task_headers being False,
            # because show_header has been forced to True
            # on the task
            pass

        pake.run(pk, tasks=test_task)

        self.assertGreater(
            pk_stdout.tell(), 0,
            msg='Task with show_header=True did not write a header to '
                'pake\'s output when pk.show_task_headers=False.')
@pk.task
def clean(ctx):
    """Clean the library"""

    file_helper = pake.FileHelper(ctx)

    file_helper.remove(os.path.join(inc_dir, 'libasm_io_defines.inc'))
    file_helper.remove(os.path.join(inc_dir, 'libasm_io_libc_call.inc'))
    file_helper.remove(os.path.join(inc_dir, 'libasm_io_cdecl.inc'))

    file_helper.rmtree('bin')
    file_helper.rmtree('obj')


@pk.task
def clean_examples(ctx):
    """Clean the library examples."""

    subpake_args = (['examples/pakefile.py', 'clean', '-C', d]
                    for d in glob.glob('examples/*/'))

    sync_subpake = partial(ctx.subpake, collect_output=pk.max_jobs > 1)

    with ctx.multitask() as mt:
        list(mt.map(sync_subpake, subpake_args))


@pk.task(clean, clean_examples)
def clean_all():
    """Clean the library and library examples."""
    pass


pake.run(pk, tasks=build_library)
import sys
import os

script_dir = os.path.dirname(os.path.realpath(__file__))

sys.path.insert(
    1, os.path.abspath(os.path.join(script_dir, os.path.join('..', '..'))))

import pake

pk = pake.init()

RETURNCODE = pk.get_define('RETURNCODE', 0)
TERMINATE = pk.get_define('TERMINATE', False)


@pk.task
def default(ctx):
    if TERMINATE:
        pk.terminate(RETURNCODE)
    else:
        exit(RETURNCODE)


pake.run(pk, tasks=default)
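A test harness could drive the pakefile above by passing the RETURNCODE and TERMINATE defines on the command line with -D and checking the resulting exit status. A sketch, assuming the script is saved as exitcode.py (hypothetical name) and that exit()/pk.terminate() propagate the code as the process exit status:

import subprocess
import sys

# exit() path: the default task calls exit(42), which should end the
# pake process with status 42.
result = subprocess.run([sys.executable, 'exitcode.py',
                         '-D', 'RETURNCODE=42'])
assert result.returncode == 42

# terminate() path: pk.terminate(42) should produce the same exit status.
result = subprocess.run([sys.executable, 'exitcode.py',
                         '-D', 'RETURNCODE=42', '-D', 'TERMINATE=True'])
assert result.returncode == 42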