def test_subpake_depth(self):
    # Verify pake.subpake error propagation and the subpake depth counter,
    # both before and after pake.init().
    assert_depth_script = os.path.join(script_dir, 'assert_subpake_depth.py')

    pake.de_init(clear_conf=False)

    # With call_exit at its default, a failing subpake exits the process.
    with self.assertRaises(SystemExit) as err:
        pake.subpake(os.path.join(script_dir, 'throw.py'))

    # With call_exit=False, the failure surfaces as SubpakeException instead.
    with self.assertRaises(pake.SubpakeException) as err:
        pake.subpake(os.path.join(script_dir, 'throw.py'), call_exit=False)

    # If pake is not initialized, there is no depth tracking
    try:
        pake.subpake(assert_depth_script, '-D', 'DEPTH=0', call_exit=False)
    except pake.SubpakeException as err:
        self.fail(
            'subpake depth=0 assertion failed, return code {}.'.format(
                err.returncode))

    # Pake must be initialized for depth tracking
    pake.init()

    try:
        pake.subpake(assert_depth_script, '-D', 'DEPTH=1', call_exit=False)
    except pake.SubpakeException as err:
        self.fail(
            'subpake depth=1 assertion failed, return code {}.'.format(
                err.returncode))
def test_run_changedir(self):
    # Verify pake.init(args=['-C', dir]) changes the working directory at
    # init time and that pake.run restores the starting directory afterward.
    pake.de_init(clear_conf=False)

    start_dir = os.getcwd()
    dest_dir = os.path.abspath(os.path.join(script_dir, '..'))

    # Tell pake to change directories on init
    pk = pake.init(args=['-C', dest_dir])

    self.assertEqual(dest_dir, os.getcwd())
    self.assertEqual(pake.get_init_dir(), start_dir)

    @pk.task
    def check_dir(ctx):
        if dest_dir != os.getcwd():
            raise Exception()

    self.assertEqual(
        pake.run(pk, tasks=check_dir, call_exit=False), returncodes.SUCCESS)

    # Should be back to normal after run
    self.assertEqual(start_dir, os.getcwd())

    # ==========================
    # Check that its forced quietly before run, even if it is changed prior

    pake.de_init(clear_conf=False)

    pk = pake.init(args=['-C', dest_dir])

    self.assertEqual(dest_dir, os.getcwd())

    @pk.task
    def check_dir(ctx):
        if dest_dir != os.getcwd():
            raise Exception()

    # Try changing it back...
    os.chdir(script_dir)

    # Directory should change here to dest_dir
    self.assertEqual(
        pake.run(pk, tasks=check_dir, call_exit=False), returncodes.SUCCESS)

    # Should be back to normal after run
    self.assertEqual(start_dir, os.getcwd())
def test_check_output(self):
    # Exercise process.check_output error paths (timeout, non-zero exit) and
    # verify pake wraps the failure in a TaskException when run inside a task.
    cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')]

    # NOTE(review): the command list is unpacked here but passed whole below;
    # process.check_output presumably accepts both forms -- confirm.
    with self.assertRaises(process.TimeoutExpired) as exc:
        process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL)

    _ = str(exc.exception)  # just test for serialization exceptions

    cmd = [sys.executable, os.path.join(script_dir, 'throw.py')]

    with self.assertRaises(process.CalledProcessException) as exc:
        process.check_output(cmd, stderr=process.DEVNULL)

    _ = str(exc.exception)  # just test for serialization exceptions

    # Check pake propagates the exception correctly
    pake.de_init(clear_conf=False)
    pk = pake.init()

    @pk.task
    def dummy(ctx):
        process.check_output(cmd, stderr=process.DEVNULL)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=dummy)

    self.assertEqual(
        type(exc.exception.exception), process.CalledProcessException)
def assert_bad_args(*args):
    # Helper (closes over self): pake.init must exit with
    # returncodes.BAD_ARGUMENTS for the given argv values.
    pake.de_init(clear_conf=False)
    try:
        pake.init(args=list(args))
    except SystemExit as err:
        exit_init_code = err.code
        self.assertEqual(
            exit_init_code,
            returncodes.BAD_ARGUMENTS,
            msg=
            'Passed bad command line arguments, exited with 0 (success).'
        )
    else:
        # Reaching here means init swallowed the bad arguments entirely.
        self.fail(
            'Bad command line arguments were passed to pake.init and it did not exit!'
        )
def test_normal_exception(self):
    """Exceptions raised inside ctx.multitask (via mt.submit and mt.map)
    must propagate out of pk.run as pake.TaskException wrapping the
    original exception type."""
    pake.de_init(clear_conf=False)
    pk = pake.init()

    class TestException(Exception):
        def __init__(self):
            pass

    def raise_test(unused):
        raise TestException()

    @pk.task
    def test_submit(ctx):
        with ctx.multitask() as mt:
            mt.submit(raise_test, None)

    @pk.task
    def test_map(ctx):
        with ctx.multitask() as mt:
            mt.map(raise_test, range(0, 5))

    with self.assertRaises(pake.TaskException) as te:
        pk.run(tasks=test_submit)

    # Fix: the original performed this assertion twice in a row, once with
    # no message; keep only the variant carrying the diagnostic message.
    self.assertEqual(
        type(te.exception.exception),
        TestException,
        msg='test_multitask_exceptions.py: ctx.multitask with mt.submit did '
        'not propagate the correct exception type, expected '
        '"TestException", got "{}"'.format(type(te.exception.exception)))

    with self.assertRaises(pake.TaskException) as te:
        pk.run(tasks=test_map)

    self.assertEqual(
        type(te.exception.exception),
        TestException,
        msg='test_multitask_exceptions.py: ctx.multitask with mt.map did '
        'not propagate the correct exception type, expected '
        '"TestException", got "{}"'.format(type(te.exception.exception)))
def assert_exit_code(*args, no_tasks=False, all_up_to_date=False, code=returncodes.SUCCESS):
    # Helper (closes over self): run pake with the given argv and assert the
    # resulting exit code, whether pake.run returns it or init raises
    # SystemExit with it.
    pake.de_init(clear_conf=False)
    try:
        pk = pake.init(args=list(args))
        if no_tasks is False:
            # Make all tasks up to date, pake does not care
            # if the input file is the same as the output
            # it just compares the modification time of whatever is there
            in_out = os.path.join(script_dir, 'test_data',
                                  'in1') if all_up_to_date else None

            pk.add_task('dummy', lambda ctx: None,
                        inputs=in_out, outputs=in_out)
            pk.add_task('dummy2', lambda ctx: None,
                        inputs=in_out, outputs=in_out)

        got_code = pake.run(pk)

        self.assertEqual(
            got_code,
            code,
            msg=
            'Command line argument resulted in an unexpected exit code. '
            'expected {}, but got {}.'.format(code, got_code))
    except SystemExit as err:
        self.assertEqual(
            err.code,
            code,
            msg=
            'Command line argument resulted in an unexpected exit code. '
            'expected {}, but got {}.'.format(code, err.code))
def _is_running_exit_test(self, jobs, exit_method):
    # Test that the state of pk.is_running and pk.threadpool
    # are correct even after pake experiences an exception inside
    # of a task

    pake.de_init(clear_conf=False)
    pk = pake.init()

    self.assertEqual(pk.is_running, False)
    self.assertEqual(pk.threadpool, None)

    @pk.task
    def task_a(ctx):
        exit_method(pk)

    # Fix: task_b and task_c were previously not registered with @pk.task,
    # so handing them to pk.run would fail on an undefined task before the
    # exit path under test was ever reached.
    @pk.task
    def task_b(ctx):
        pass

    @pk.task
    def task_c(ctx):
        pass

    try:
        pk.run(tasks=[task_a, task_b, task_c], jobs=jobs)
    except pake.TaskExitException as err:
        if not isinstance(err.exception, SystemExit):
            # Fix: the original formatted pake.util.qualified_name(err.__name__),
            # but exception *instances* have no __name__ attribute, so the
            # failure report itself raised AttributeError. Name the wrapped
            # exception instead.
            self.fail(
                'Unexpected exception "{}" in pake.is_running exit test!, '
                'expected SystemExit.'.format(
                    pake.util.qualified_name(err.exception)))
    else:
        self.fail(
            'Expected pake.TaskExitException, no exception was raised!')

    # State must be reset even though the run was aborted by an exit.
    self.assertEqual(pk.is_running, False)
    self.assertEqual(pk.threadpool, None)
def test_task_terminate_exception(self):
    # pk.terminate() inside a task must abort the build with the given
    # return code, including when other tasks run concurrently.
    pake.de_init(clear_conf=False)
    pk = pake.init()

    @pk.task
    def test(ctx):
        # Sleep so the other tasks are in-flight when terminate fires.
        time.sleep(0.5)
        pk.terminate(return_code=100)

    @pk.task
    def test2(ctx):
        time.sleep(0.3)

    @pk.task
    def test3(ctx):
        time.sleep(0.2)

    # Make sure that terminate() effects even multithreaded builds

    # The return code with call_exit=False should match the exit code in the task
    self.assertEqual(
        pake.run(pk, tasks=[test2, test3, test], jobs=10, call_exit=False),
        100)

    self.assertEqual(
        pake.run(pk, tasks=[test2, test3, test], call_exit=False), 100)

    with self.assertRaises(pake.TaskExitException) as exc:
        pk.run(tasks=[test2, test3, test])

    self.assertEqual(type(exc.exception.exception), pake.TerminateException)
    self.assertEqual(exc.exception.task_name, 'test')
    self.assertEqual(exc.exception.return_code, 100)
def _is_running_test(self, jobs=1):
    # Test that the is_running and threadpool properties
    # of the Pake object maintain the correct state

    class TestException(Exception):
        def __init__(self, *args):
            super().__init__(*args)

    pake.de_init(clear_conf=False)
    pk = pake.init()

    self.assertEqual(pk.is_running, False)
    self.assertEqual(pk.threadpool, None)

    @pk.task
    def task_a(ctx):
        # Probe the Pake object's state from inside a running task.
        if not pk.is_running:
            raise TestException(
                'Test failed, pk.is_running is False while pake is running.'
            )
        if jobs == 1 and pk.threadpool:
            raise TestException(
                'Test failed, pk.threadpool is NOT None when jobs == 1.')
        if jobs > 1 and not pk.threadpool:
            raise TestException(
                'Test failed, pk.threadpool is None when jobs > 1.')

    try:
        pk.run(tasks=[task_a], jobs=jobs)
    except pake.TaskException as err:
        # Surface the in-task probe failure as a test failure.
        self.fail(str(err.exception))

    self.assertEqual(pk.is_running, False)
    self.assertEqual(pk.threadpool, None)
def collect_output_test_helper(jobs):
    # Helper (closes over self/script_dir): verify TaskSubprocessException
    # from ctx.call(collect_output=True) propagates, and that write_info
    # consumes and disposes of the collected output stream.
    pk = pake.init()

    @pk.task
    def call1(ctx):
        ctx.call(
            sys.executable,
            os.path.join(script_dir, 'throw.py'),
            collect_output=True)  # Raise pake.TaskSubprocessException

    # Test pake.TaskSubprocessException propagation
    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=call1, jobs=jobs)
    self.assertEqual(
        type(exc.exception.exception), pake.TaskSubprocessException)

    # Stream should be rewound, ready to be read.
    self.assertEqual(exc.exception.exception.output_stream.tell(), 0)

    # Just to test for exceptions writing the TaskSubprocessException.output prop
    exc.exception.exception.write_info(pake.conf.stdout)

    # write_info disposes of the stream once written.
    self.assertEqual(exc.exception.exception.output_stream, None)
def test_run(self):
    # Cover pake.run basics: task/run counting, invalid job counts, and each
    # non-success return code, in both normal and --dry-run modes.
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # should still be parsed and the object available, even with no arguments passed
    self.assertTrue(pake.arguments.args_are_parsed())

    run_count = 0

    @pk.task
    def task_one(ctx):
        nonlocal run_count
        run_count += 1

    @pk.task
    def task_two(ctx):
        nonlocal run_count
        run_count += 1

    self.assertEqual(pk.task_count, 2)

    # Tasks may be referenced by function object or by name.
    pake.run(pk, tasks=[task_one, 'task_two'])

    self.assertEqual(pk.task_count, 2)
    self.assertEqual(pk.run_count, run_count)

    with self.assertRaises(ValueError):
        # Because jobs <= 1
        pake.run(pk, tasks=[task_one, 'task_two'], jobs=0)

    with self.assertRaises(ValueError):
        # Because jobs <= 1
        pake.run(pk, tasks=[task_one, 'task_two'], jobs=-1)

    # ===========

    def test_run_helper(dry_run=False):
        # Walk through every failure return code of pake.run.
        nonlocal self

        pake.de_init(clear_conf=False)

        if dry_run:
            pk = pake.init(args=['--dry-run'])
        else:
            pk = pake.init()

        # No tasks defined
        self.assertEqual(
            pake.run(pk, call_exit=False), returncodes.NO_TASKS_DEFINED)

        @pk.task
        def task_one():
            raise Exception()

        # No tasks specified
        self.assertEqual(
            pake.run(pk, call_exit=False), returncodes.NO_TASKS_SPECIFIED)

        # Undefined task
        self.assertEqual(
            pake.run(pk, tasks='undefined', call_exit=False),
            returncodes.UNDEFINED_TASK)

        if not dry_run:
            # Exception in task
            self.assertEqual(
                pake.run(pk, tasks='task_one', call_exit=False),
                returncodes.TASK_EXCEPTION)

        @pk.task(i='IDontExist.nope', o='nada')
        def task_two():
            pass

        # Input file not found
        self.assertEqual(
            pake.run(pk, tasks='task_two', call_exit=False),
            returncodes.TASK_INPUT_NOT_FOUND)

        @pk.task(i='IDontExist.nope')
        def task_three():
            pass

        # Missing output file
        self.assertEqual(
            pake.run(pk, tasks='task_three', call_exit=False),
            returncodes.TASK_OUTPUT_MISSING)

        # ======== Cover Subpake and Call exception propagation

        @pk.task
        def task_four(ctx):
            ctx.subpake(os.path.join(script_dir, 'throw.py'))

        @pk.task
        def task_five(ctx):
            # execute with the current interpreter
            ctx.call(sys.executable, os.path.join(script_dir, 'throw.py'))

        if not dry_run:
            # Because 'throw.py' runs but throws an exception
            self.assertEqual(
                pake.run(pk, tasks='task_four', call_exit=False),
                returncodes.SUBPAKE_EXCEPTION)

            # Same thing, except differentiate as a task subprocess exception
            self.assertEqual(
                pake.run(pk, tasks=task_five, call_exit=False),
                returncodes.TASK_SUBPROCESS_EXCEPTION)

    test_run_helper()
    test_run_helper(True)
def _basic_behavior_test(self, jobs):
    # Exercise up-to-date detection for each input/output declaration shape,
    # at the given job count.
    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # runs because 'test' is missing
    @pk.task(i=[], o=['test'])
    def task_a(ctx):
        nonlocal ran, self
        ran = True
        self.assertTrue(len(ctx.outputs) == 1)
        self.assertTrue(len(ctx.outdated_outputs) == 1)
        self.assertTrue(ctx.outdated_outputs[0] == 'test')

    pk.run(tasks=task_a, jobs=jobs)

    self.assertTrue(ran)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # runs because 'test' is missing
    @pk.task(o=['test'])
    def task_a(ctx):
        nonlocal ran, self
        ran = True
        self.assertTrue(len(ctx.outputs) == 1)
        self.assertTrue(len(ctx.outdated_outputs) == 1)
        self.assertTrue(ctx.outdated_outputs[0] == 'test')

    pk.run(tasks=task_a, jobs=jobs)

    self.assertTrue(ran)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # Always runs
    @pk.task
    def task_a(ctx):
        nonlocal ran, self
        self.assertTrue(len(ctx.outputs) == 0)
        self.assertTrue(len(ctx.inputs) == 0)
        self.assertTrue(len(ctx.outdated_outputs) == 0)
        self.assertTrue(len(ctx.outdated_inputs) == 0)
        ran = True

    pk.run(tasks=task_a, jobs=jobs)

    self.assertTrue(ran)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # Always runs
    @pk.task(i=None, o=None)
    def task_a(ctx):
        nonlocal ran, self
        self.assertTrue(len(ctx.outputs) == 0)
        self.assertTrue(len(ctx.inputs) == 0)
        self.assertTrue(len(ctx.outdated_outputs) == 0)
        self.assertTrue(len(ctx.outdated_inputs) == 0)
        ran = True

    pk.run(tasks=task_a, jobs=jobs)

    self.assertTrue(ran)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # Wont ever run
    @pk.task(i=[], o=[])
    def task_a(ctx):
        nonlocal ran
        ran = True

    pk.run(tasks=task_a, jobs=jobs)

    self.assertFalse(ran)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # Wont ever run
    @pk.task(i=pake.glob('*.theres_nothing_named_this_in_the_directory'),
             o=pake.pattern('%.o'))
    def task_a(ctx):
        nonlocal ran
        ran = True

    pk.run(tasks=task_a, jobs=jobs)

    self.assertFalse(ran)
import pake import os import getpass import shutil from pake import process pk = pake.init(show_task_headers=False) IMAGE_NAME = pk.get_define('IMAGE', 'firestorm_build_env_ubuntu_16.04') IMAGE_VERSION = pk.get_define('IMAGE_VERSION', '0.3.0') WIN_VOLUME = pk.get_define('WIN_VOLUME', 'firestorm_build_env_volume') ENTRY_SCRIPT = 'src/entry.sh' IMAGE = '{}:{}'.format(IMAGE_NAME, IMAGE_VERSION) import subprocess def on_windows(): return os.name == 'nt' def docker_image_exists(name): return process.call('docker', 'image', 'inspect', name, stdout=process.DEVNULL,
import sys import unittest import os script_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert( 1, os.path.abspath(os.path.join(script_dir, os.path.join('..', '..')))) import pake # Force the test to occur in the correct place os.chdir(script_dir) pk = pake.init() pake.export('TEST_EXPORT', 'test"test') pake.export('TEST_EXPORT1', [1, 'te"st', [3, 4, "test'test"]]) pake.export('TEST_EXPORT2', {0: 1, 1: 'te"st', 2: [3, 4, "test'test"]}) pake.export('TEST_EXPORT3', {1, 'te"st', 3, 4, "test'test"}) pake.export('TEST_EXPORT4', (1, 'te"st', [3, 4, "test'test"])) pake.export('TEST_EXPORT5', '') @pk.task(i='test_data/one.c', o='test_data/do_single.o')
import sys
import os

script_dir = os.path.dirname(os.path.realpath(__file__))

sys.path.insert(
    1, os.path.abspath(os.path.join(script_dir, os.path.join('..', '..'))))

import pake
import pake.util

# Script under test: verifies the __PAKE_SYNC_OUTPUT environment variable
# is set/cleared by pake.init/pake.de_init.
# init once for defines/exports, de_init before next init
# STDIN defines from pake.subpake are cached, pake.de_init
# does not clear them
defines = pake.init()

assert '__PAKE_SYNC_OUTPUT' in os.environ

expected_value = defines['SYNC_OUTPUT_EXPECTED_VALUE']

if defines.has_define('INIT_SYNC_OUTPUT_VALUE'):
    # Re-init with an explicit sync_output value and check the env var
    # is removed by de_init and restored by init.
    init_sync_output = defines['INIT_SYNC_OUTPUT_VALUE']
    pake.de_init()
    assert '__PAKE_SYNC_OUTPUT' not in os.environ
    pk = pake.init(sync_output=init_sync_output)
    assert '__PAKE_SYNC_OUTPUT' in os.environ
else:
    pake.de_init()
def _exceptions_test(self, jobs):
    # Verify MissingOutputsException / InputNotFoundException are raised for
    # every malformed or missing input/output combination, at the given job
    # count.
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # MissingOutputFilesException, even if 'test' does not exist on disk
    @pk.task(i=['test'], o=[])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.MissingOutputsException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    # MissingOutputFilesException, even if 'test' does not exist on disk
    @pk.task(i=['test'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.MissingOutputsException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    # MissingOutputFilesException, even if 'test' and 'test2' do not exist on disk
    @pk.task(i=['test', 'test2'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.MissingOutputsException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    # InputFileNotFoundException, since usage is valid but a.c is missing
    @pk.task(i=['a.c'], o=['a.o'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case above, but this time the output exists
    @pk.task(i=['a.c'], o=os.path.join(script_dir, 'test_data', 'out1'))
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but with multiple inputs
    @pk.task(i=['a.c', 'b.c'], o=['a.o'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but the output exists this time around.
    @pk.task(i=['a.c', 'b.c'],
             o=os.path.join(script_dir, 'test_data', 'out1'))
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but with multiple inputs and outputs,
    # one of the outputs exists this time around.
    @pk.task(i=['a.c', 'b.c'],
             o=['a.o', os.path.join(script_dir, 'test_data', 'out1')])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but this time both outputs don't exist
    @pk.task(i=['a.c', 'b.c'], o=['a.o', 'b.o'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but this time there are
    # more outputs than inputs
    @pk.task(i=['a.c', 'b.c'], o=['a.o', 'b.o', 'c.o'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but this time there are
    # more inputs than outputs
    @pk.task(i=['a.c', 'b.c', 'c.o'], o=['a.o', 'b.o'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)
def m_init():
    """Re-initialize pake (keeping its configuration) when call_init is set."""
    if not call_init:
        return
    pake.de_init(clear_conf=False)
    pake.init()
def test_init(self):
    # Cover pake.init/de_init state transitions, default depth/jobs values,
    # and -D define parsing (bad and good values).
    pake.de_init(clear_conf=False)

    self.assertFalse(pake.is_init())

    with self.assertRaises(pake.PakeUninitializedException):
        pake.terminate(pake.Pake(), 0)

    pk = pake.init()

    self.assertTrue(pk.stdout is pake.conf.stdout)

    self.assertTrue(pake.is_init())

    self.assertEqual(pake.get_subpake_depth(), 0)
    self.assertEqual(pake.get_max_jobs(), 1)

    self.assertEqual(pk.task_count, 0)
    self.assertEqual(len(pk.task_contexts), 0)

    this_file = os.path.abspath(__file__)

    self.assertEqual(pake.get_init_file(), this_file)
    self.assertEqual(pake.get_init_dir(), os.getcwd())

    with self.assertRaises(ValueError):
        pake.program.run(None)

    pake.de_init(clear_conf=False)

    pk = pake.init(args=['--jobs', '10'])

    self.assertEqual(pake.get_max_jobs(), 10)

    pake.de_init(clear_conf=False)

    # Unbalanced brace: not parseable as a value.
    with self.assertRaises(SystemExit) as cm:
        pake.init(args=['-D', 'TEST={ I am a bad define'])

    self.assertEqual(cm.exception.code, returncodes.BAD_DEFINE_VALUE)

    # These should not throw
    pake.init(args=['-D', 'TEST= {"I am a good define" } '])
    pake.init(args=['-D', 'TEST= "I am a good define" '])
    pake.init(args=['-D', 'TEST= 1 am a good define '])  # they are strings
    pake.init(args=['-D', 'TEST= 1 2 3 '])
    pake.init(args=['-D', 'TEST=1 2 3 '])

    pake.de_init(clear_conf=False)
def test_subprocess_task_exceptions(self):
    # Verify ctx.subpake and ctx.call/check_call/check_output wrap their
    # underlying errors in pake.TaskException, for single and multi-job runs.
    # =============================

    pk = pake.init()

    @pk.task
    def subpake1(ctx):
        ctx.subpake()  # ValueError

    @pk.task
    def subpake2(ctx):
        ctx.subpake('missing file')  # FileNotFound

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=subpake1)
    self.assertEqual(type(exc.exception.exception), ValueError)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=subpake1, jobs=10)
    self.assertEqual(type(exc.exception.exception), ValueError)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=subpake2)
    self.assertEqual(type(exc.exception.exception), FileNotFoundError)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=subpake2, jobs=10)
    self.assertEqual(type(exc.exception.exception), FileNotFoundError)

    # =============================

    def subprocess_test_helper(method):
        # Run the same battery against the named TaskContext method
        # ('call', 'check_call' or 'check_output').
        pk = pake.init()

        @pk.task
        def call1(ctx):
            getattr(ctx, method)()  # ValueError

        @pk.task
        def call2(ctx):
            getattr(ctx, method)('missing file')  # FileNotFound

        @pk.task
        def call3(ctx):
            call = getattr(ctx, method)
            call(sys.executable,
                 os.path.join(
                     script_dir,
                     'throw.py'))  # Raise pake.TaskSubprocessException

        @pk.task
        def call4(ctx):
            call = getattr(ctx, method)
            call(sys.executable,
                 os.path.join(script_dir, 'throw.py'),
                 ignore_errors=True)  # ignore exception

        with self.assertRaises(pake.TaskException) as exc:
            pk.run(tasks=call1)
        self.assertEqual(type(exc.exception.exception), ValueError)

        with self.assertRaises(pake.TaskException) as exc:
            pk.run(tasks=call1, jobs=10)
        self.assertEqual(type(exc.exception.exception), ValueError)

        with self.assertRaises(pake.TaskException) as exc:
            pk.run(tasks=call2)
        self.assertEqual(type(exc.exception.exception), FileNotFoundError)

        with self.assertRaises(pake.TaskException) as exc:
            pk.run(tasks=call2, jobs=10)
        self.assertEqual(type(exc.exception.exception), FileNotFoundError)

        # Test pake.TaskSubprocessException propagation

        with self.assertRaises(pake.TaskException) as exc:
            pk.run(tasks=call3)
        self.assertEqual(
            type(exc.exception.exception), pake.TaskSubprocessException)
        exc.exception.exception.write_info(
            pake.conf.stdout)  # avoid unit test resource warning

        with self.assertRaises(pake.TaskException) as exc:
            pk.run(tasks=call3, jobs=10)
        self.assertEqual(
            type(exc.exception.exception), pake.TaskSubprocessException)
        exc.exception.exception.write_info(
            pake.conf.stdout)  # avoid unit test resource warning

        try:
            pk.run(tasks=call4)
        except pake.TaskException:
            self.fail(
                'TaskContext.{} threw on non zero return code with ignore_errors=True'
                .format(method))

        try:
            pk.run(tasks=call4, jobs=10)
        except pake.TaskException:
            self.fail(
                'TaskContext.{} threw on non zero return code with ignore_errors=True. with pk.run(jobs=10)'
                .format(method))

    subprocess_test_helper('call')
    subprocess_test_helper('check_call')
    subprocess_test_helper('check_output')

    def collect_output_test_helper(jobs):
        # Same as call3 above but with collect_output=True; also checks the
        # collected output stream is rewound and disposed by write_info.
        pk = pake.init()

        @pk.task
        def call1(ctx):
            ctx.call(
                sys.executable,
                os.path.join(script_dir, 'throw.py'),
                collect_output=True)  # Raise pake.TaskSubprocessException

        # Test pake.TaskSubprocessException propagation
        with self.assertRaises(pake.TaskException) as exc:
            pk.run(tasks=call1, jobs=jobs)
        self.assertEqual(
            type(exc.exception.exception), pake.TaskSubprocessException)

        self.assertEqual(exc.exception.exception.output_stream.tell(), 0)

        # Just to test for exceptions writing the TaskSubprocessException.output prop
        exc.exception.exception.write_info(pake.conf.stdout)

        self.assertEqual(exc.exception.exception.output_stream, None)

    collect_output_test_helper(1)
    collect_output_test_helper(5)
def subprocess_test_helper(method):
    # Helper (closes over self/script_dir): run the error-propagation battery
    # against the named TaskContext method ('call', 'check_call' or
    # 'check_output').
    pk = pake.init()

    @pk.task
    def call1(ctx):
        getattr(ctx, method)()  # ValueError

    @pk.task
    def call2(ctx):
        getattr(ctx, method)('missing file')  # FileNotFound

    @pk.task
    def call3(ctx):
        call = getattr(ctx, method)
        call(sys.executable,
             os.path.join(script_dir,
                          'throw.py'))  # Raise pake.TaskSubprocessException

    @pk.task
    def call4(ctx):
        call = getattr(ctx, method)
        call(sys.executable,
             os.path.join(script_dir, 'throw.py'),
             ignore_errors=True)  # ignore exception

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=call1)
    self.assertEqual(type(exc.exception.exception), ValueError)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=call1, jobs=10)
    self.assertEqual(type(exc.exception.exception), ValueError)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=call2)
    self.assertEqual(type(exc.exception.exception), FileNotFoundError)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=call2, jobs=10)
    self.assertEqual(type(exc.exception.exception), FileNotFoundError)

    # Test pake.TaskSubprocessException propagation

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=call3)
    self.assertEqual(
        type(exc.exception.exception), pake.TaskSubprocessException)
    exc.exception.exception.write_info(
        pake.conf.stdout)  # avoid unit test resource warning

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=call3, jobs=10)
    self.assertEqual(
        type(exc.exception.exception), pake.TaskSubprocessException)
    exc.exception.exception.write_info(
        pake.conf.stdout)  # avoid unit test resource warning

    try:
        pk.run(tasks=call4)
    except pake.TaskException:
        self.fail(
            'TaskContext.{} threw on non zero return code with ignore_errors=True'
            .format(method))

    try:
        pk.run(tasks=call4, jobs=10)
    except pake.TaskException:
        self.fail(
            'TaskContext.{} threw on non zero return code with ignore_errors=True. with pk.run(jobs=10)'
            .format(method))
def test_task_exceptions(self):
    # An exception raised in a dependency task -- directly, via mt.submit, or
    # via mt.map -- must reach pk.run as pake.TaskException in both single
    # and multi-job runs.
    # =============================

    pk = pake.init()

    @pk.task
    def c_task(ctx):
        pass

    @pk.task
    def b_task(ctx):
        raise Exception()

    @pk.task(b_task, c_task)
    def a_task(ctx):
        pass

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=a_task)
    self.assertEqual(type(exc.exception.exception), Exception)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=a_task, jobs=10)
    self.assertEqual(type(exc.exception.exception), Exception)

    def raise_exception(*args):
        raise Exception()

    # =============================

    pk = pake.init()

    @pk.task
    def c_task(ctx):
        pass

    @pk.task
    def b_task(ctx):
        with ctx.multitask() as mt:
            mt.submit(raise_exception)

    @pk.task(b_task, c_task)
    def a_task(ctx):
        pass

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=a_task)
    self.assertEqual(type(exc.exception.exception), Exception)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=a_task, jobs=10)
    self.assertEqual(type(exc.exception.exception), Exception)

    # =============================

    pk = pake.init()

    @pk.task
    def c_task(ctx):
        pass

    @pk.task
    def b_task(ctx):
        with ctx.multitask() as mt:
            # Force evaluation of the lazy map result.
            list(mt.map(raise_exception, ['test']))

    @pk.task(b_task, c_task)
    def a_task(ctx):
        pass

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=a_task)
    self.assertEqual(type(exc.exception.exception), Exception)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=a_task, jobs=10)
    self.assertEqual(type(exc.exception.exception), Exception)
def _existing_files_test(self, jobs):
    # Test file comparisons when outputs already exist

    in1 = os.path.join(script_dir, 'test_data', 'in1')
    in2 = os.path.join(script_dir, 'test_data', 'in2')
    out1 = os.path.join(script_dir, 'test_data', 'out1')
    out2 = os.path.join(script_dir, 'test_data', 'out2')

    # ================

    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # Make all the modification times ancient
    os.utime(in1, (0, 0))
    os.utime(in2, (0, 0))
    os.utime(out1, (0, 0))
    os.utime(out2, (0, 0))

    # Make an input recent
    pake.FileHelper().touch(in1)

    # This should run
    @pk.task(i=[in1, in2], o=out1)
    def task_a(ctx):
        nonlocal ran
        ran = True

    pk.run(tasks=task_a, jobs=jobs)

    self.assertTrue(ran)

    # ================
    # Same test as above, except with multiple outputs

    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # Make all the modification times ancient
    os.utime(in1, (0, 0))
    os.utime(in2, (0, 0))
    os.utime(out1, (0, 0))
    os.utime(out2, (0, 0))

    # Make an input recent
    pake.FileHelper().touch(in2)

    # This should run
    @pk.task(i=[in1, in2], o=[out1, out2])
    def task_a(ctx):
        nonlocal ran
        ran = True

    pk.run(tasks=task_a, jobs=jobs)

    self.assertTrue(ran)
def test_registration_and_run(self):
    # Cover task registration lookups, TaskContext properties both inside and
    # outside of a running task, redefinition errors, and run_count accounting.
    pake.de_init(clear_conf=False)
    pk = pake.init()

    def undefined_task():
        pass

    with self.assertRaises(pake.UndefinedTaskException):
        _ = pk.get_task_name(undefined_task)

    with self.assertRaises(pake.UndefinedTaskException):
        _ = pk.get_task_context(undefined_task)

    with self.assertRaises(pake.UndefinedTaskException):
        _ = pk.get_task_name("undefined_task")

    with self.assertRaises(pake.UndefinedTaskException):
        _ = pk.get_task_context("undefined_task")

    script_path = os.path.dirname(os.path.abspath(__file__))

    in1 = os.path.join(script_path, 'test_data', 'in1')

    # Does not need to exist, easier than dealing with
    # a full path.
    out1 = 'out1'

    pake.util.touch(in1)

    in2 = os.path.join(script_path, 'test_data', 'in2')

    # Does not need to exist either.
    out2 = 'out2'

    pake.util.touch(in2)

    @pk.task(o='dep_one.o')
    def dep_one(ctx):
        pass

    @pk.task(o=['dep_two.o', out2])
    def dep_two(ctx):
        pass

    @pk.task(o='dep_three.o')
    def dep_three(ctx):
        pass

    @pk.task(dep_one, dep_two, i=in1, o=out1)
    def task_one(ctx):
        nonlocal self
        self.assertListEqual(ctx.inputs, [in1])
        self.assertListEqual(ctx.outputs, [out1])
        self.assertListEqual(ctx.outdated_inputs, [in1])
        self.assertListEqual(ctx.outdated_outputs, [out1])
        self.assertListEqual(list(ctx.outdated_pairs), [(in1, out1)])

        # Check that the correct immediate dependency outputs are reported.
        self.assertCountEqual(['dep_one.o', 'dep_two.o', out2],
                              ctx.dependency_outputs)

        dep_one_ctx = pk.get_task_context(dep_one)
        dep_two_ctx = pk.get_task_context(dep_two)

        # Check that the correct immediate dependencies are reported.
        self.assertCountEqual([dep_one_ctx, dep_two_ctx], ctx.dependencies)

    def other_task(ctx):
        # Registered below as 'task_two' via pk.add_task.
        nonlocal self
        self.assertListEqual(ctx.inputs, [in2])
        self.assertListEqual(ctx.outputs, [out2])
        self.assertListEqual(ctx.outdated_inputs, [in2])
        self.assertListEqual(ctx.outdated_outputs, [out2])
        self.assertListEqual(list(ctx.outdated_pairs), [(in2, out2)])

        task_one_ctx = pk.get_task_context(task_one)
        dep_three_ctx = pk.get_task_context(dep_three)

        # Check that the correct immediate dependency outputs are reported.
        self.assertCountEqual(['dep_three.o', out1], ctx.dependency_outputs)

        # Check that the correct immediate dependencies are reported.
        self.assertCountEqual([task_one_ctx, dep_three_ctx],
                              ctx.dependencies)

    ctx = pk.add_task('task_two', other_task, inputs=in2, outputs=out2,
                      dependencies=[task_one, dep_three])

    task_one_ctx = pk.get_task_context(task_one)
    dep_three_ctx = pk.get_task_context(dep_three)

    # Check that the correct immediate dependencies are reported.
    # ctx.dependencies should return a meaningful value outside of a task
    # as well as inside.  That is not the case with ctx.dependency_outputs
    self.assertCountEqual([task_one_ctx, dep_three_ctx], ctx.dependencies)

    # Not available yet
    self.assertListEqual([], ctx.dependency_outputs)

    # Not available yet
    self.assertListEqual([], ctx.outputs)

    # Not available yet
    self.assertListEqual([], ctx.inputs)

    # Not available yet
    self.assertListEqual([], ctx.outdated_outputs)

    # Not available yet
    self.assertListEqual([], ctx.outdated_inputs)

    self.assertEqual(ctx.name, 'task_two')

    self.assertEqual(ctx, pk.get_task_context('task_two'))
    self.assertEqual(ctx, pk.get_task_context(other_task))

    self.assertEqual(pk.get_task_context('task_one'),
                     pk.get_task_context(task_one))

    self.assertEqual(pk.get_task_name(task_one), 'task_one')
    self.assertEqual(pk.get_task_name(other_task), 'task_two')

    self.assertEqual(pk.task_count, 5)
    self.assertEqual(len(pk.task_contexts), 5)

    with self.assertRaises(pake.UndefinedTaskException):
        pk.get_task_context('undefined')

    with self.assertRaises(pake.UndefinedTaskException):
        pk.get_task_name('undefined')

    with self.assertRaises(ValueError):
        pk.get_task_name(1)

    with self.assertRaises(ValueError):
        pk.get_task_name(None)

    with self.assertRaises(pake.RedefinedTaskException):
        pk.add_task('task_one', task_one)

    with self.assertRaises(pake.RedefinedTaskException):
        pk.add_task('task_two', other_task)

    # Raises an exception if there is an issue
    # Makes this test easier to debug
    pk.run(tasks='task_two')

    with self.assertRaises(ValueError):
        # Because jobs <= 1
        pk.run(tasks='task_two', jobs=-1)

    with self.assertRaises(ValueError):
        # Because jobs <= 1
        pk.run(tasks='task_two', jobs=0)

    with self.assertRaises(ValueError):
        # Because tasks is None
        pk.run(tasks=None)

    with self.assertRaises(ValueError):
        # Because tasks is empty
        pk.run(tasks=[])

    self.assertEqual(pake.run(pk, tasks=['task_two'], call_exit=False), 0)

    # All 5 tasks ran: task_two plus its full dependency graph.
    self.assertEqual(pk.run_count, 5)
def test_show_header(self):
    """Verify pake.Pake.show_task_headers interacts correctly with the
    per-task ``show_header`` parameter of ``pk.add_task`` / ``pk.task``.

    Four scenarios are checked by running a no-op task and inspecting
    whether anything was written to pake's stdout:

    1. show_task_headers=True  + show_header=None  -> header written
    2. show_task_headers=True  + show_header=False -> nothing written
    3. show_task_headers=False + show_header=None  -> nothing written
    4. show_task_headers=False + show_header=True  -> header written
    """
    with tempfile.TemporaryFile(mode='w+') as pk_stdout:
        # Scenario 1: defaults everywhere -> header expected.
        pake.de_init(clear_conf=False)
        pk = pake.init(stdout=pk_stdout)

        self.assertTrue(pk.show_task_headers)

        @pk.task
        def test_task(ctx):
            # I will print a header by default
            pass

        pake.run(pk, tasks=test_task)

        # Any bytes written means the header was printed.
        self.assertGreater(
            pk_stdout.tell(), 0,
            msg='Task with show_header=None (default value) did not write '
                'a header when pk.show_task_headers=True.')

        # Rewind AND truncate so the next scenario starts with an
        # empty file instead of stale bytes from this one.
        pk_stdout.seek(0)
        pk_stdout.truncate()

        # ============

        # Scenario 2: per-task override suppresses the header.
        pake.de_init(clear_conf=False)
        pk = pake.init(stdout=pk_stdout)

        self.assertTrue(pk.show_task_headers)

        @pk.task(show_header=False)
        def test_task(ctx):
            # I will print nothing at all,
            # even if pk.show_task_headers is True
            pass

        pake.run(pk, tasks=test_task)

        self.assertEqual(
            pk_stdout.tell(), 0,
            msg='Task with show_header=False wrote a header to pakes output '
                'when pk.show_task_headers=True.')

        pk_stdout.seek(0)
        pk_stdout.truncate()

        # ============

        # Scenario 3: headers disabled globally, no per-task override.
        pake.de_init(clear_conf=False)
        pk = pake.init(stdout=pk_stdout)

        pk.show_task_headers = False

        @pk.task
        def test_task(ctx):
            # I will print nothing at all,
            # because pk.show_task_headers is False
            # and it was not overridden with show_header=True
            pass

        pake.run(pk, tasks=test_task)

        self.assertEqual(
            pk_stdout.tell(), 0,
            msg='Task with show_header=None (default value) wrote a header '
                'to pakes output when pk.show_task_headers=False.')

        pk_stdout.seek(0)
        pk_stdout.truncate()

        # ============

        # Scenario 4: per-task override forces the header back on.
        pake.de_init(clear_conf=False)
        pk = pake.init(stdout=pk_stdout)

        pk.show_task_headers = False

        @pk.task(show_header=True)
        def test_task(ctx):
            # I will print a header regardless
            # of pk.show_task_headers being False,
            # because show_header has been forced to True
            # on the task
            pass

        pake.run(pk, tasks=test_task)

        self.assertGreater(
            pk_stdout.tell(), 0,
            msg='Task with show_header=True did not write header to pakes '
                'output when pk.show_task_headers=False.')
def test_aggregate_exception(self):
    """Verify that ``ctx.multitask`` aggregates exceptions raised by
    submitted/mapped work items into a single pake.AggregateException,
    both serially (jobs=1) and in parallel (jobs=10).
    """
    pake.de_init(clear_conf=False)
    pk = pake.init()

    class TestException(Exception):
        # Tagged exception so each raised instance can be told apart
        # when checking the aggregate for duplicates.
        def __init__(self, exc_id):
            # Forward to Exception so str()/repr() carry the id
            # (the original omitted this, serializing an empty message).
            super().__init__(exc_id)
            self.exc_id = exc_id

    def raise_test(exc_id):
        raise TestException(exc_id)

    # Number of work items that will raise in each task below.
    test_submit_exc_count = 5
    test_map_exc_count = 7

    @pk.task
    def test_submit(ctx):
        # Throw some extra tasks in
        with ctx.multitask() as mt:
            mt.aggregate_exceptions = True
            for i in range(0, 10):
                if i < test_submit_exc_count:
                    mt.submit(raise_test, i)
                else:
                    mt.submit(lambda: None)

    @pk.task
    def test_map(ctx):
        # Test that the map function of the executor
        # aggregates exceptions. It is just using .submit
        # under the hood so it should be fine, test anyway
        with ctx.multitask(aggregate_exceptions=True) as mt:
            arguments = range(0, 18)

            def should_raise(argument):
                if argument < test_map_exc_count:
                    raise_test(argument)

            mt.map(should_raise, arguments)

    # Assert that the correct amount of exceptions
    # were raised, and that their ID's were unique

    def all_unique(x):
        # seen.add returns None (falsy), so the `or` both records and
        # passes through already-seen detection in one expression.
        seen = set()
        return not any(i in seen or seen.add(i) for i in x)

    def assert_exception_count(task, count, jobs):
        # Run `task` and assert it fails with an AggregateException
        # containing exactly `count` unique TestException instances.
        task_name = pk.get_task_name(task)
        try:
            pk.run(tasks=task, jobs=jobs)
        except pake.TaskException as err:
            if isinstance(err.exception, pake.AggregateException):
                aggregate = err.exception
                self.assertEqual(
                    len(aggregate.exceptions), count,
                    msg=
                    'test_multitask_exceptions.py: Task Name: "{}", Expected {} '
                    'exceptions to have been aggregated, got: {}'.format(
                        task_name, count, len(aggregate.exceptions)))

                self.assertTrue(
                    all_unique(i.exc_id for i in aggregate.exceptions),
                    msg=
                    'test_multitask_exceptions.py: Task Name: "{}", aggregate '
                    'exception contained the same exception more than once.'
                    .format(task_name))

                # test for exceptions
                aggregate.write_info()
            else:
                self.fail(
                    msg=
                    'test_multitask_exceptions.py: Task Name: "{}", Expected a '
                    'pake.AggregateException to be raised, got: {}'.format(
                        task_name, err.exception))
        else:
            self.fail(
                msg=
                'test_multitask_exceptions.py: Task Name: "{}", Expected '
                'pake.AggregateException to be raised and cause a pake.TaskException'
                .format(task_name))

    # Use the named constants rather than repeating the literals 5/7,
    # so the expectations cannot drift from the task definitions above.
    assert_exception_count(test_submit, test_submit_exc_count, jobs=1)
    assert_exception_count(test_map, test_map_exc_count, jobs=1)

    assert_exception_count(test_submit, test_submit_exc_count, jobs=10)
    assert_exception_count(test_map, test_map_exc_count, jobs=10)
def test_run_helper(dry_run=False):
    """Closure (note ``nonlocal self``) exercising pake.run return codes.

    Runs a series of deliberately failing configurations and asserts
    each one maps to the expected ``returncodes`` constant.  When
    ``dry_run`` is True, pake is initialized with ``--dry-run`` and the
    checks that require tasks to actually execute are skipped.

    NOTE(review): task registration order matters — each ``@pk.task``
    below is defined immediately before the assertions that need it.
    """
    nonlocal self

    pake.de_init(clear_conf=False)

    if dry_run:
        pk = pake.init(args=['--dry-run'])
    else:
        pk = pake.init()

    # No tasks defined
    self.assertEqual(pake.run(pk, call_exit=False),
                     returncodes.NO_TASKS_DEFINED)

    @pk.task
    def task_one():
        # Always fails; used to provoke TASK_EXCEPTION below.
        raise Exception()

    # No tasks specified
    self.assertEqual(pake.run(pk, call_exit=False),
                     returncodes.NO_TASKS_SPECIFIED)

    # Undefined task
    self.assertEqual(pake.run(pk, tasks='undefined', call_exit=False),
                     returncodes.UNDEFINED_TASK)

    if not dry_run:
        # Exception in task
        self.assertEqual(
            pake.run(pk, tasks='task_one', call_exit=False),
            returncodes.TASK_EXCEPTION)

    @pk.task(i='IDontExist.nope', o='nada')
    def task_two():
        pass

    # Input file not found
    self.assertEqual(pake.run(pk, tasks='task_two', call_exit=False),
                     returncodes.TASK_INPUT_NOT_FOUND)

    @pk.task(i='IDontExist.nope')
    def task_three():
        pass

    # Missing output file
    self.assertEqual(pake.run(pk, tasks='task_three', call_exit=False),
                     returncodes.TASK_OUTPUT_MISSING)

    # ======== Cover Subpake and Call exception propagation

    @pk.task
    def task_four(ctx):
        # throw.py raises, so the subpake invocation fails.
        ctx.subpake(os.path.join(script_dir, 'throw.py'))

    @pk.task
    def task_five(ctx):
        # execute with the current interpreter
        ctx.call(sys.executable, os.path.join(script_dir, 'throw.py'))

    if not dry_run:
        # Because 'throw.py' runs but throws an exception
        self.assertEqual(
            pake.run(pk, tasks='task_four', call_exit=False),
            returncodes.SUBPAKE_EXCEPTION)

        # Same thing, except differentiate as a task subprocess exception
        self.assertEqual(
            pake.run(pk, tasks=task_five, call_exit=False),
            returncodes.TASK_SUBPROCESS_EXCEPTION)
def _call_test(self, jobs):
    """Exercise pake.TaskContext.call return-code handling.

    Runs two helper scripts (one exiting 10, one exiting 0) through
    ``ctx.call`` with every combination of ``ignore_errors`` /
    ``silent`` / ``print_cmd`` / ``collect_output`` the suite cares
    about, asserting the reported return code each time.

    :param jobs: job count passed to pake via ``--jobs``.
    """
    exit_10 = os.path.join(script_dir, 'exit_10.py')
    exit_0 = os.path.join(script_dir, 'exit_0.py')

    pake.de_init(clear_conf=False)
    pk = pake.init(args=['--jobs', str(jobs)])

    # Just so this path gets hit at least once
    pk.sync_output = False

    class TestFailException(Exception):
        # Raised inside a task when ctx.call reports the wrong code.
        def __init__(self, expected, code):
            # Forward a readable message to Exception so str() is
            # informative (the original left the message empty).
            super().__init__(
                'expected {}, got: {}'.format(expected, code))
            self.code = code
            self.expected = expected

    def expect_code(ctx, script, expected, **call_kwargs):
        # One ctx.call invocation; raise TestFailException on mismatch.
        return_code = ctx.call(sys.executable, script, **call_kwargs)
        if return_code != expected:
            raise TestFailException(expected, return_code)

    @pk.task
    def test_10(ctx):
        # exit_10.py must report 10 under several call configurations.
        expect_code(ctx, exit_10, 10,
                    ignore_errors=True, silent=True)
        expect_code(ctx, exit_10, 10,
                    ignore_errors=True, silent=True, print_cmd=False)
        expect_code(ctx, exit_10, 10,
                    ignore_errors=True, silent=True, collect_output=True)

    @pk.task
    def test_0(ctx):
        # exit_0.py must report 0 under several call configurations.
        expect_code(ctx, exit_0, 0,
                    ignore_errors=True, collect_output=True)
        expect_code(ctx, exit_0, 0,
                    ignore_errors=True, collect_output=True,
                    print_cmd=False)
        expect_code(ctx, exit_0, 0, collect_output=True)
        expect_code(ctx, exit_0, 0, print_cmd=False)
        expect_code(ctx, exit_0, 0, silent=True, print_cmd=False)

    try:
        pk.run(tasks=test_10)
    except pake.TaskException as err:
        if isinstance(err.exception, TestFailException):
            # Added the missing space between the two message fragments
            # ("...return code.expected..." in the original).
            self.fail('pake.TaskContext.call exit_10.py failed to return '
                      'correct return code. '
                      'expected {}, got: {}'.format(
                          err.exception.expected,
                          err.exception.code))
        else:
            print(str(err.exception))
            raise err.exception

    try:
        pk.run(tasks=test_0)
    except pake.TaskException as err:
        if isinstance(err.exception, TestFailException):
            self.fail('pake.TaskContext.call exit_0.py failed to return '
                      'correct return code. '
                      'expected {}, got: {}'.format(
                          err.exception.expected,
                          err.exception.code))
        else:
            raise err.exception