def test_subpake_depth(self):
    """Verify subpake error propagation and depth tracking.

    A failing subpake exits the process by default, raises
    SubpakeException with call_exit=False, and the reported subpake
    depth is 0 before pake.init() and 1 after.
    """
    depth_script = os.path.join(script_dir, 'assert_subpake_depth.py')
    throw_script = os.path.join(script_dir, 'throw.py')

    pake.de_init(clear_conf=False)

    # Default behavior: a failing subpake exits the whole process.
    with self.assertRaises(SystemExit):
        pake.subpake(throw_script)

    # With call_exit=False the failure surfaces as an exception instead.
    with self.assertRaises(pake.SubpakeException):
        pake.subpake(throw_script, call_exit=False)

    # If pake is not initialized, there is no depth tracking.
    try:
        pake.subpake(depth_script, '-D', 'DEPTH=0', call_exit=False)
    except pake.SubpakeException as exc:
        self.fail(
            'subpake depth=0 assertion failed, return code {}.'.format(
                exc.returncode))

    # Pake must be initialized for depth tracking.
    pake.init()
    try:
        pake.subpake(depth_script, '-D', 'DEPTH=1', call_exit=False)
    except pake.SubpakeException as exc:
        self.fail(
            'subpake depth=1 assertion failed, return code {}.'.format(
                exc.returncode))
def test_check_output(self):
    """Exercise process.check_output error paths and their propagation
    through a pake task.

    Checks that TimeoutExpired and CalledProcessException are raised,
    that both exceptions serialize via str() without error, and that a
    CalledProcessException raised inside a task surfaces as the inner
    exception of pake.TaskException.
    """
    # NOTE(review): the first call unpacks the command (*cmd) while the
    # second passes the list directly (cmd) — presumably
    # process.check_output accepts both forms; confirm this is
    # intentional and not a latent bug.
    cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')]
    with self.assertRaises(process.TimeoutExpired) as exc:
        process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL)
    _ = str(exc.exception)  # just test for serialization exceptions
    cmd = [sys.executable, os.path.join(script_dir, 'throw.py')]
    with self.assertRaises(process.CalledProcessException) as exc:
        process.check_output(cmd, stderr=process.DEVNULL)
    _ = str(exc.exception)  # just test for serialization exceptions

    # Check pake propagates the exception correctly
    pake.de_init(clear_conf=False)
    pk = pake.init()

    @pk.task
    def dummy(ctx):
        # Fails: throw.py exits with a non-zero return code.
        process.check_output(cmd, stderr=process.DEVNULL)

    with self.assertRaises(pake.TaskException) as exc:
        pk.run(tasks=dummy)
    # The original process exception is preserved as the inner exception.
    self.assertEqual(type(exc.exception.exception),
                     process.CalledProcessException)
def test_run_changedir(self):
    """Verify the -C option: pake changes into the requested directory
    for init and run, restores the original directory afterwards, and
    re-enforces the directory at run time even if it was changed in
    between.
    """
    pake.de_init(clear_conf=False)

    original_cwd = os.getcwd()
    target_dir = os.path.abspath(os.path.join(script_dir, '..'))

    # Tell pake to change directories on init.
    pk = pake.init(args=['-C', target_dir])
    self.assertEqual(target_dir, os.getcwd())
    self.assertEqual(pake.get_init_dir(), original_cwd)

    @pk.task
    def check_dir(ctx):
        if target_dir != os.getcwd():
            raise Exception()

    self.assertEqual(
        pake.run(pk, tasks=check_dir, call_exit=False),
        returncodes.SUCCESS)

    # Should be back to normal after run.
    self.assertEqual(original_cwd, os.getcwd())

    # ==========================
    # Check that it is forced quietly before run, even if it is
    # changed prior.
    pake.de_init(clear_conf=False)
    pk = pake.init(args=['-C', target_dir])
    self.assertEqual(target_dir, os.getcwd())

    @pk.task
    def check_dir(ctx):
        if target_dir != os.getcwd():
            raise Exception()

    # Try changing it back...
    os.chdir(script_dir)

    # Directory should change here to target_dir.
    self.assertEqual(
        pake.run(pk, tasks=check_dir, call_exit=False),
        returncodes.SUCCESS)

    # Should be back to normal after run.
    self.assertEqual(original_cwd, os.getcwd())
def test_init(self):
    """Cover pake.init basics: pre-init exceptions, default state,
    --jobs handling, and -D define parsing (bad and good values)."""
    pake.de_init(clear_conf=False)

    self.assertFalse(pake.is_init())

    # terminate() requires an initialized pake module.
    with self.assertRaises(pake.PakeUninitializedException):
        pake.terminate(pake.Pake(), 0)

    pk = pake.init()

    # Freshly initialized defaults.
    self.assertTrue(pk.stdout is pake.conf.stdout)
    self.assertTrue(pake.is_init())
    self.assertEqual(pake.get_subpake_depth(), 0)
    self.assertEqual(pake.get_max_jobs(), 1)
    self.assertEqual(pk.task_count, 0)
    self.assertEqual(len(pk.task_contexts), 0)

    this_file = os.path.abspath(__file__)
    self.assertEqual(pake.get_init_file(), this_file)
    self.assertEqual(pake.get_init_dir(), os.getcwd())

    # run() rejects a None pake instance even when initialized.
    with self.assertRaises(ValueError):
        pake.program.run(None)

    pake.de_init(clear_conf=False)
    pk = pake.init(args=['--jobs', '10'])
    self.assertEqual(pake.get_max_jobs(), 10)

    pake.de_init(clear_conf=False)
    with self.assertRaises(SystemExit) as exit_ctx:
        pake.init(args=['-D', 'TEST={ I am a bad define'])
    self.assertEqual(exit_ctx.exception.code, returncodes.BAD_DEFINE_VALUE)

    # These should not throw; unparseable-but-benign values are strings.
    for good_define in ('TEST= {"I am a good define" } ',
                        'TEST= "I am a good define" ',
                        'TEST= 1 am a good define ',
                        'TEST= 1 2 3 ',
                        'TEST=1 2 3 '):
        pake.init(args=['-D', good_define])

    pake.de_init(clear_conf=False)
def assert_bad_args(*args):
    """Assert that pake.init exits with returncodes.BAD_ARGUMENTS when
    given the command line arguments *args.

    Fails the enclosing test if pake.init does not exit at all, or
    exits with a different code.
    """
    pake.de_init(clear_conf=False)
    try:
        pake.init(args=list(args))
    except SystemExit as err:
        # FIX: the previous failure message unconditionally claimed
        # "exited with 0 (success)" even though this branch runs for any
        # SystemExit code; report the expected and actual codes instead.
        self.assertEqual(
            err.code,
            returncodes.BAD_ARGUMENTS,
            msg='Passed bad command line arguments, expected exit code {} '
                '(BAD_ARGUMENTS), got {}.'.format(
                    returncodes.BAD_ARGUMENTS, err.code))
    else:
        self.fail(
            'Bad command line arguments were passed to pake.init and it did not exit!'
        )
def test_normal_exception(self):
    """Verify that exceptions raised inside ctx.multitask (via both
    mt.submit and mt.map) propagate out of pk.run wrapped in
    pake.TaskException with the original exception preserved."""
    pake.de_init(clear_conf=False)
    pk = pake.init()

    class TestException(Exception):
        def __init__(self):
            pass

    def raise_test(unused):
        raise TestException()

    @pk.task
    def test_submit(ctx):
        with ctx.multitask() as mt:
            mt.submit(raise_test, None)

    @pk.task
    def test_map(ctx):
        with ctx.multitask() as mt:
            mt.map(raise_test, range(0, 5))

    with self.assertRaises(pake.TaskException) as te:
        pk.run(tasks=test_submit)

    # FIX: removed a redundant duplicate of this assertion (an identical
    # assertEqual without a msg= immediately preceded this one).
    self.assertEqual(
        type(te.exception.exception), TestException,
        msg='test_multitask_exceptions.py: ctx.multitask with mt.submit did '
        'not propagate the correct exception type, expected '
        '"TestException", got "{}"'.format(type(te.exception.exception)))

    with self.assertRaises(pake.TaskException) as te:
        pk.run(tasks=test_map)

    self.assertEqual(
        type(te.exception.exception), TestException,
        msg='test_multitask_exceptions.py: ctx.multitask with mt.map did '
        'not propagate the correct exception type, expected '
        '"TestException", got "{}"'.format(type(te.exception.exception)))
def assert_exit_code(*args, no_tasks=False, all_up_to_date=False,
                     code=returncodes.SUCCESS):
    """Run pake with the given argv and assert it finishes (or exits)
    with the expected return code.

    no_tasks leaves the pakefile empty; all_up_to_date registers two
    dummy tasks whose input and output are the same existing file, so
    they are always considered up to date.
    """
    pake.de_init(clear_conf=False)
    try:
        pk = pake.init(args=list(args))
        if no_tasks is False:
            # Make all tasks up to date; pake does not care if the input
            # file is the same as the output, it just compares the
            # modification time of whatever is there.
            io_path = None
            if all_up_to_date:
                io_path = os.path.join(script_dir, 'test_data', 'in1')
            pk.add_task('dummy', lambda ctx: None,
                        inputs=io_path, outputs=io_path)
            pk.add_task('dummy2', lambda ctx: None,
                        inputs=io_path, outputs=io_path)
        result = pake.run(pk)
        self.assertEqual(
            result, code,
            msg='Command line argument resulted in an unexpected exit code. '
            'expected {}, but got {}.'.format(code, result))
    except SystemExit as err:
        self.assertEqual(
            err.code, code,
            msg='Command line argument resulted in an unexpected exit code. '
            'expected {}, but got {}.'.format(code, err.code))
def _is_running_exit_test(self, jobs, exit_method):
    """Verify pk.is_running / pk.threadpool are reset after a task calls
    an exit method (exit_method receives the Pake instance and is
    expected to raise SystemExit).

    FIX: the failure path previously called
    pake.util.qualified_name(err.__name__); exception instances have no
    __name__ attribute, so reporting the failure would itself raise
    AttributeError. It now passes the inner exception object.
    (Also removed an unused local TestException class.)
    """
    # Test that the state of pk.is_running and pk.threadpool
    # are correct even after pake experiences an exception inside
    # of a task.
    pake.de_init(clear_conf=False)
    pk = pake.init()

    self.assertEqual(pk.is_running, False)
    self.assertEqual(pk.threadpool, None)

    @pk.task
    def task_a(ctx):
        exit_method(pk)

    # NOTE(review): task_b and task_c are not registered with @pk.task
    # here — presumably pk.run tolerates plain callables or never
    # reaches them because task_a exits first; confirm against pake's
    # run() semantics.
    def task_b(ctx):
        pass

    def task_c(ctx):
        pass

    try:
        pk.run(tasks=[task_a, task_b, task_c], jobs=jobs)
    except pake.TaskExitException as err:
        if not isinstance(err.exception, SystemExit):
            self.fail(
                'Unexpected exception "{}" in pake.is_running exit test!, '
                'expected SystemExit.'.format(
                    pake.util.qualified_name(err.exception)))
    else:
        self.fail(
            'Expected pake.TaskExitException, no exception was raised!')

    # State must be fully reset even though the run was aborted.
    self.assertEqual(pk.is_running, False)
    self.assertEqual(pk.threadpool, None)
def _is_running_test(self, jobs=1):
    """Verify the is_running and threadpool properties of the Pake
    object maintain the correct state before, during, and after a run
    with the given job count."""

    class TestException(Exception):
        def __init__(self, *args):
            super().__init__(*args)

    def check(failed, message):
        # Raise inside the task so the failure is reported through
        # pake.TaskException below.
        if failed:
            raise TestException(message)

    pake.de_init(clear_conf=False)
    pk = pake.init()

    self.assertEqual(pk.is_running, False)
    self.assertEqual(pk.threadpool, None)

    @pk.task
    def task_a(ctx):
        check(not pk.is_running,
              'Test failed, pk.is_running is False while pake is running.')
        check(jobs == 1 and pk.threadpool,
              'Test failed, pk.threadpool is NOT None when jobs == 1.')
        check(jobs > 1 and not pk.threadpool,
              'Test failed, pk.threadpool is None when jobs > 1.')

    try:
        pk.run(tasks=[task_a], jobs=jobs)
    except pake.TaskException as err:
        self.fail(str(err.exception))

    # State must be reset once the run completes.
    self.assertEqual(pk.is_running, False)
    self.assertEqual(pk.threadpool, None)
def test_task_terminate_exception(self):
    """Verify pk.terminate(return_code=...) inside a task: the code is
    returned by pake.run with call_exit=False (single and multi
    threaded), and without call_exit it surfaces as TaskExitException
    wrapping TerminateException."""
    pake.de_init(clear_conf=False)
    pk = pake.init()

    @pk.task
    def test(ctx):
        time.sleep(0.5)
        pk.terminate(return_code=100)

    @pk.task
    def test2(ctx):
        time.sleep(0.3)

    @pk.task
    def test3(ctx):
        time.sleep(0.2)

    all_tasks = [test2, test3, test]

    # Make sure that terminate() effects even multithreaded builds.
    # The return code with call_exit=False should match the exit code
    # in the task.
    self.assertEqual(
        pake.run(pk, tasks=all_tasks, jobs=10, call_exit=False), 100)
    self.assertEqual(
        pake.run(pk, tasks=all_tasks, call_exit=False), 100)

    with self.assertRaises(pake.TaskExitException) as exc:
        pk.run(tasks=all_tasks)

    # The wrapped exception identifies the terminating task and code.
    self.assertEqual(type(exc.exception.exception), pake.TerminateException)
    self.assertEqual(exc.exception.task_name, 'test')
    self.assertEqual(exc.exception.return_code, 100)
def test_exceptions(self):
    """Every pake.program accessor and run() must raise
    PakeUninitializedException before pake.init is called."""
    pake.de_init(clear_conf=False)

    self.assertFalse(pake.program.is_init())

    # All state accessors require initialization.
    for accessor in (pake.program.get_max_jobs,
                     pake.program.get_subpake_depth,
                     pake.program.get_init_dir,
                     pake.program.get_init_file):
        with self.assertRaises(pake.PakeUninitializedException):
            accessor()

    # run() requires initialization regardless of its argument.
    with self.assertRaises(pake.PakeUninitializedException):
        pake.program.run(pake.Pake())

    with self.assertRaises(pake.PakeUninitializedException):
        pake.program.run(None)
def _call_test(self, jobs):
    """Exercise pake.TaskContext.call return-code handling with the
    given job count, across combinations of ignore_errors, silent,
    print_cmd and collect_output, for both a failing (exit 10) and a
    succeeding (exit 0) helper script."""
    exit_10 = os.path.join(script_dir, 'exit_10.py')
    exit_0 = os.path.join(script_dir, 'exit_0.py')
    pake.de_init(clear_conf=False)
    pk = pake.init(args=['--jobs', str(jobs)])
    # Just so this path gets hit at least once
    pk.sync_output = False

    class TestFailException(Exception):
        # Carries the expected and actual return codes for the
        # failure message below.
        def __init__(self, expected, code):
            self.code = code
            self.expected = expected

    @pk.task
    def test_10(ctx):
        # exit_10.py exits with 10; ignore_errors lets call() return
        # the code instead of raising.
        return_code = ctx.call(sys.executable,
                               exit_10,
                               ignore_errors=True,
                               silent=True)
        if return_code != 10:
            raise TestFailException(10, return_code)
        return_code = ctx.call(sys.executable,
                               exit_10,
                               ignore_errors=True,
                               silent=True,
                               print_cmd=False)
        if return_code != 10:
            raise TestFailException(10, return_code)
        return_code = ctx.call(sys.executable,
                               exit_10,
                               ignore_errors=True,
                               silent=True,
                               collect_output=True)
        if return_code != 10:
            raise TestFailException(10, return_code)

    @pk.task
    def test_0(ctx):
        # exit_0.py always succeeds; cover the remaining option
        # combinations.
        return_code = ctx.call(sys.executable,
                               exit_0,
                               ignore_errors=True,
                               collect_output=True)
        if return_code != 0:
            raise TestFailException(0, return_code)
        return_code = ctx.call(sys.executable,
                               exit_0,
                               ignore_errors=True,
                               collect_output=True,
                               print_cmd=False)
        if return_code != 0:
            raise TestFailException(0, return_code)
        return_code = ctx.call(sys.executable, exit_0, collect_output=True)
        if return_code != 0:
            raise TestFailException(0, return_code)
        return_code = ctx.call(sys.executable, exit_0, print_cmd=False)
        if return_code != 0:
            raise TestFailException(0, return_code)
        return_code = ctx.call(sys.executable,
                               exit_0,
                               silent=True,
                               print_cmd=False)
        if return_code != 0:
            raise TestFailException(0, return_code)

    try:
        pk.run(tasks=test_10)
    except pake.TaskException as err:
        if isinstance(err.exception, TestFailException):
            # NOTE: the adjacent string literals concatenate without a
            # space, so the message reads "...code.expected ...".
            self.fail('pake.TaskContext.call exit_10.py failed to return '
                      'correct return code.'
                      'expected {}, got: {}'.format(err.exception.expected,
                                                    err.exception.code))
        else:
            print(str(err.exception))
            raise err.exception

    try:
        pk.run(tasks=test_0)
    except pake.TaskException as err:
        if isinstance(err.exception, TestFailException):
            self.fail('pake.TaskContext.call exit_0.py failed to return '
                      'correct return code.'
                      'expected {}, got: {}'.format(err.exception.expected,
                                                    err.exception.code))
        else:
            raise err.exception
def test_registration_and_run(self):
    """End-to-end check of task registration: undefined/redefined task
    errors, decorator and add_task registration, dependency and
    dependency-output reporting inside and outside of tasks, task
    counting, and run() argument validation."""
    pake.de_init(clear_conf=False)
    pk = pake.init()

    def undefined_task():
        pass

    # Lookups of unregistered tasks fail, by callable or by name.
    with self.assertRaises(pake.UndefinedTaskException):
        _ = pk.get_task_name(undefined_task)

    with self.assertRaises(pake.UndefinedTaskException):
        _ = pk.get_task_context(undefined_task)

    with self.assertRaises(pake.UndefinedTaskException):
        _ = pk.get_task_name("undefined_task")

    with self.assertRaises(pake.UndefinedTaskException):
        _ = pk.get_task_context("undefined_task")

    script_path = os.path.dirname(os.path.abspath(__file__))

    in1 = os.path.join(script_path, 'test_data', 'in1')
    # Does not need to exist, easier than dealing with
    # a full path.
    out1 = 'out1'
    pake.util.touch(in1)

    in2 = os.path.join(script_path, 'test_data', 'in2')
    # Does not need to exist either.
    out2 = 'out2'
    pake.util.touch(in2)

    @pk.task(o='dep_one.o')
    def dep_one(ctx):
        pass

    @pk.task(o=['dep_two.o', out2])
    def dep_two(ctx):
        pass

    @pk.task(o='dep_three.o')
    def dep_three(ctx):
        pass

    @pk.task(dep_one, dep_two, i=in1, o=out1)
    def task_one(ctx):
        nonlocal self
        self.assertListEqual(ctx.inputs, [in1])
        self.assertListEqual(ctx.outputs, [out1])
        self.assertListEqual(ctx.outdated_inputs, [in1])
        self.assertListEqual(ctx.outdated_outputs, [out1])
        self.assertListEqual(list(ctx.outdated_pairs), [(in1, out1)])
        # Check that the correct immediate dependency outputs are reported.
        self.assertCountEqual(['dep_one.o', 'dep_two.o', out2],
                              ctx.dependency_outputs)
        dep_one_ctx = pk.get_task_context(dep_one)
        dep_two_ctx = pk.get_task_context(dep_two)
        # Check that the correct immediate dependencies are reported.
        self.assertCountEqual([dep_one_ctx, dep_two_ctx], ctx.dependencies)

    def other_task(ctx):
        nonlocal self
        self.assertListEqual(ctx.inputs, [in2])
        self.assertListEqual(ctx.outputs, [out2])
        self.assertListEqual(ctx.outdated_inputs, [in2])
        self.assertListEqual(ctx.outdated_outputs, [out2])
        self.assertListEqual(list(ctx.outdated_pairs), [(in2, out2)])
        task_one_ctx = pk.get_task_context(task_one)
        dep_three_ctx = pk.get_task_context(dep_three)
        # Check that the correct immediate dependency outputs are reported.
        self.assertCountEqual(['dep_three.o', out1], ctx.dependency_outputs)
        # Check that the correct immediate dependencies are reported.
        self.assertCountEqual([task_one_ctx, dep_three_ctx],
                              ctx.dependencies)

    # Register other_task programmatically under the name 'task_two'.
    ctx = pk.add_task('task_two', other_task, inputs=in2, outputs=out2,
                      dependencies=[task_one, dep_three])

    task_one_ctx = pk.get_task_context(task_one)
    dep_three_ctx = pk.get_task_context(dep_three)

    # Check that the correct immediate dependencies are reported.
    # ctx.dependencies should return a meaningful value outside of a task
    # as well as inside. That is not the case with ctx.dependency_outputs
    self.assertCountEqual([task_one_ctx, dep_three_ctx], ctx.dependencies)

    # Not available yet
    self.assertListEqual([], ctx.dependency_outputs)

    # Not available yet
    self.assertListEqual([], ctx.outputs)

    # Not available yet
    self.assertListEqual([], ctx.inputs)

    # Not available yet
    self.assertListEqual([], ctx.outdated_outputs)

    # Not available yet
    self.assertListEqual([], ctx.outdated_inputs)

    # Lookup by name, by registered callable, and cross-consistency.
    self.assertEqual(ctx.name, 'task_two')
    self.assertEqual(ctx, pk.get_task_context('task_two'))
    self.assertEqual(ctx, pk.get_task_context(other_task))
    self.assertEqual(pk.get_task_context('task_one'),
                     pk.get_task_context(task_one))
    self.assertEqual(pk.get_task_name(task_one), 'task_one')
    self.assertEqual(pk.get_task_name(other_task), 'task_two')

    self.assertEqual(pk.task_count, 5)
    self.assertEqual(len(pk.task_contexts), 5)

    with self.assertRaises(pake.UndefinedTaskException):
        pk.get_task_context('undefined')

    with self.assertRaises(pake.UndefinedTaskException):
        pk.get_task_name('undefined')

    with self.assertRaises(ValueError):
        pk.get_task_name(1)

    with self.assertRaises(ValueError):
        pk.get_task_name(None)

    with self.assertRaises(pake.RedefinedTaskException):
        pk.add_task('task_one', task_one)

    with self.assertRaises(pake.RedefinedTaskException):
        pk.add_task('task_two', other_task)

    # Raises an exception if there is an issue
    # Makes this test easier to debug
    pk.run(tasks='task_two')

    with self.assertRaises(ValueError):
        # Because jobs < 1
        pk.run(tasks='task_two', jobs=-1)

    with self.assertRaises(ValueError):
        # Because jobs < 1
        pk.run(tasks='task_two', jobs=0)

    with self.assertRaises(ValueError):
        # Because tasks is None
        pk.run(tasks=None)

    with self.assertRaises(ValueError):
        # Because tasks is empty
        pk.run(tasks=[])

    self.assertEqual(pake.run(pk, tasks=['task_two'], call_exit=False), 0)

    self.assertEqual(pk.run_count, 5)
def _basic_behavior_test(self, jobs):
    """Check basic task up-to-date behavior with the given job count:
    tasks with a missing output run, tasks with no inputs/outputs
    always run, and tasks with empty (or nothing-matching) input and
    output lists never run."""
    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # runs because 'test' is missing
    @pk.task(i=[], o=['test'])
    def task_a(ctx):
        nonlocal ran, self
        ran = True
        self.assertTrue(len(ctx.outputs) == 1)
        self.assertTrue(len(ctx.outdated_outputs) == 1)
        self.assertTrue(ctx.outdated_outputs[0] == 'test')

    pk.run(tasks=task_a, jobs=jobs)
    self.assertTrue(ran)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # runs because 'test' is missing (no inputs declared at all)
    @pk.task(o=['test'])
    def task_a(ctx):
        nonlocal ran, self
        ran = True
        self.assertTrue(len(ctx.outputs) == 1)
        self.assertTrue(len(ctx.outdated_outputs) == 1)
        self.assertTrue(ctx.outdated_outputs[0] == 'test')

    pk.run(tasks=task_a, jobs=jobs)
    self.assertTrue(ran)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # Always runs: no inputs or outputs declared
    @pk.task
    def task_a(ctx):
        nonlocal ran, self
        self.assertTrue(len(ctx.outputs) == 0)
        self.assertTrue(len(ctx.inputs) == 0)
        self.assertTrue(len(ctx.outdated_outputs) == 0)
        self.assertTrue(len(ctx.outdated_inputs) == 0)
        ran = True

    pk.run(tasks=task_a, jobs=jobs)
    self.assertTrue(ran)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # Always runs: i=None / o=None behave like not declaring them
    @pk.task(i=None, o=None)
    def task_a(ctx):
        nonlocal ran, self
        self.assertTrue(len(ctx.outputs) == 0)
        self.assertTrue(len(ctx.inputs) == 0)
        self.assertTrue(len(ctx.outdated_outputs) == 0)
        self.assertTrue(len(ctx.outdated_inputs) == 0)
        ran = True

    pk.run(tasks=task_a, jobs=jobs)
    self.assertTrue(ran)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # Wont ever run: explicitly empty input AND output lists
    @pk.task(i=[], o=[])
    def task_a(ctx):
        nonlocal ran
        ran = True

    pk.run(tasks=task_a, jobs=jobs)
    self.assertFalse(ran)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    ran = False

    # Wont ever run: the glob matches nothing, so the pattern produces
    # no outputs either
    @pk.task(i=pake.glob('*.theres_nothing_named_this_in_the_directory'),
             o=pake.pattern('%.o'))
    def task_a(ctx):
        nonlocal ran
        ran = True

    pk.run(tasks=task_a, jobs=jobs)
    self.assertFalse(ran)
import pake
import pake.util

# Helper script: asserts that pake.Pake.sync_output ends up equal to the
# SYNC_OUTPUT_EXPECTED_VALUE define after init, optionally passing
# INIT_SYNC_OUTPUT_VALUE to pake.init(sync_output=...).
#
# NOTE(review): os is used below but not imported in this visible chunk —
# presumably imported earlier in the file; confirm.

# init once for defines/exports, de_init before next init
# STDIN defines from pake.subpake are cached, pake.de_init
# does not clear them
defines = pake.init()

# init must have exported the sync-output marker to the environment.
assert '__PAKE_SYNC_OUTPUT' in os.environ

expected_value = defines['SYNC_OUTPUT_EXPECTED_VALUE']

if defines.has_define('INIT_SYNC_OUTPUT_VALUE'):
    # Caller requested an explicit sync_output override for pake.init.
    init_sync_output = defines['INIT_SYNC_OUTPUT_VALUE']

    pake.de_init()
    # de_init must remove the marker from the environment.
    assert '__PAKE_SYNC_OUTPUT' not in os.environ

    pk = pake.init(sync_output=init_sync_output)
    assert '__PAKE_SYNC_OUTPUT' in os.environ
else:
    pake.de_init()
    assert '__PAKE_SYNC_OUTPUT' not in os.environ

    pk = pake.init()
    assert '__PAKE_SYNC_OUTPUT' in os.environ

# The resolved sync_output must match what the parent test expects.
if pk.sync_output != expected_value:
    raise Exception('pk.sync_output was {}, expected {}!'.format(
        pk.sync_output, expected_value))
def test_subpake_ignore_errors(self):
    """Verify pake.subpake return-code handling for a pakefile that
    exits with code 42, across every combination of ignore_errors,
    silent, collect_output and collect_output_lock, with and without
    the child calling terminate (TERMINATE export)."""
    return_code_pakefile = os.path.join(script_dir,
                                        'returncode_pakefile.py')

    pake.de_init(clear_conf=False)

    def assert_return_code(code, ignore_errors, **kwargs):
        # With ignore_errors=True, subpake returns the child's code;
        # otherwise the code is reported through SubpakeException.
        nonlocal self
        if ignore_errors:
            self.assertEqual(
                pake.subpake(return_code_pakefile,
                             ignore_errors=True,
                             **kwargs), code)
        else:
            try:
                self.assertEqual(
                    pake.subpake(return_code_pakefile,
                                 ignore_errors=False,
                                 call_exit=False,
                                 **kwargs), code)
            except pake.SubpakeException as err:
                err.write_info(
                    pake.conf.stdout)  # Prevent unit test resource warnings
                self.assertEqual(err.returncode, code)

    # ====== Non Silent Path =======
    collect_output_test_lock = threading.Lock()

    pake.de_init(clear_conf=False)
    pake.export('RETURNCODE', 42)
    assert_return_code(42, ignore_errors=True)
    assert_return_code(42, ignore_errors=False)

    # Child calls terminate() with the same code.
    pake.export('TERMINATE', True)
    assert_return_code(42, ignore_errors=True)
    assert_return_code(42, ignore_errors=False)
    pake.EXPORTS.clear()

    # ====== Non Silent / Collect Output Path =======
    pake.de_init(clear_conf=False)
    pake.export('RETURNCODE', 42)
    assert_return_code(42, ignore_errors=True, collect_output=True)
    assert_return_code(42, ignore_errors=False, collect_output=True)
    assert_return_code(42, ignore_errors=True, collect_output=True,
                       collect_output_lock=collect_output_test_lock)
    assert_return_code(42, ignore_errors=False, collect_output=True,
                       collect_output_lock=collect_output_test_lock)

    pake.export('TERMINATE', True)
    assert_return_code(42, ignore_errors=True, collect_output=True)
    assert_return_code(42, ignore_errors=False, collect_output=True)
    pake.EXPORTS.clear()

    # ======= Silent Path =========
    pake.de_init(clear_conf=False)
    pake.export('RETURNCODE', 42)
    assert_return_code(42, ignore_errors=True, silent=True)
    assert_return_code(42, ignore_errors=False, silent=True)

    pake.export('TERMINATE', True)
    assert_return_code(42, ignore_errors=True, silent=True)
    assert_return_code(42, ignore_errors=False, silent=True)
    pake.EXPORTS.clear()

    # ======= Silent / Collect Output Path =========
    pake.de_init(clear_conf=False)
    pake.export('RETURNCODE', 42)
    assert_return_code(42, ignore_errors=True, silent=True,
                       collect_output=True)
    assert_return_code(42, ignore_errors=False, silent=True,
                       collect_output=True)
    assert_return_code(42, ignore_errors=True, silent=True,
                       collect_output=True,
                       collect_output_lock=collect_output_test_lock)
    assert_return_code(42, ignore_errors=False, silent=True,
                       collect_output=True,
                       collect_output_lock=collect_output_test_lock)

    pake.export('TERMINATE', True)
    assert_return_code(42, ignore_errors=True, silent=True,
                       collect_output=True)
    assert_return_code(42, ignore_errors=False, silent=True,
                       collect_output=True)
    assert_return_code(42, ignore_errors=True, silent=True,
                       collect_output=True,
                       collect_output_lock=collect_output_test_lock)
    assert_return_code(42, ignore_errors=False, silent=True,
                       collect_output=True,
                       collect_output_lock=collect_output_test_lock)
    pake.EXPORTS.clear()
def _exceptions_test(self, jobs):
    """Verify input/output validation exceptions with the given job
    count: declaring inputs without outputs raises
    MissingOutputsException, and declaring inputs that do not exist on
    disk raises InputNotFoundException, across several input/output
    shape combinations."""
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # MissingOutputsException, even if 'test' does not exist on disk
    @pk.task(i=['test'], o=[])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.MissingOutputsException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # MissingOutputsException, even if 'test' does not exist on disk
    @pk.task(i=['test'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.MissingOutputsException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # MissingOutputsException, even if 'test' and 'test2' do not
    # exist on disk
    @pk.task(i=['test', 'test2'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.MissingOutputsException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # InputNotFoundException, since usage is valid but a.c is missing
    @pk.task(i=['a.c'], o=['a.o'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case above, but this time the output exists
    @pk.task(i=['a.c'], o=os.path.join(script_dir, 'test_data', 'out1'))
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but with multiple inputs
    @pk.task(i=['a.c', 'b.c'], o=['a.o'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but the output exists this time
    # around.
    @pk.task(i=['a.c', 'b.c'],
             o=os.path.join(script_dir, 'test_data', 'out1'))
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but with multiple inputs and outputs,
    # one of the outputs exists this time around.
    @pk.task(i=['a.c', 'b.c'],
             o=['a.o', os.path.join(script_dir, 'test_data', 'out1')])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but this time both outputs don't exist
    @pk.task(i=['a.c', 'b.c'], o=['a.o', 'b.o'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but this time there are
    # more outputs than inputs
    @pk.task(i=['a.c', 'b.c'], o=['a.o', 'b.o', 'c.o'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)

    # ================
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # Check the same case as above but this time there are
    # more inputs than outputs
    @pk.task(i=['a.c', 'b.c', 'c.o'], o=['a.o', 'b.o'])
    def task_a(ctx):
        pass

    with self.assertRaises(pake.InputNotFoundException):
        pk.run(tasks=task_a, jobs=jobs)
def m_init():
    """Re-initialize pake between scenarios, but only when the
    enclosing test variant requested it (call_init closure flag)."""
    if not call_init:
        return
    pake.de_init(clear_conf=False)
    pake.init()
def _test_stub(self, call_init): # This clears any internal environmental # vars pake sets up, by default pake.de_init(clear_conf=False) assert_sync_output = os.path.join(script_dir, 'assert_sync_output.py') # os.environ['INIT_SYNC_OUTPUT_VALUE'], value is passed to pake.init(sync_output=..) in assert script # if the environmental variable is not set, no value is passed indicating no manual override to pake # os.environ['SYNC_OUTPUT_EXPECTED_VALUE'] # The value of pake.Pake.sync_output must equal this value after pake.init, or the script will assert def set_init_sync_output_value(value): pake.export('INIT_SYNC_OUTPUT_VALUE', value) def del_init_sync_output_value(): # Prevent passing to pake.init all together # the **kwargs argument will not be specified if 'INIT_SYNC_OUTPUT_VALUE' in pake.EXPORTS: del pake.EXPORTS['INIT_SYNC_OUTPUT_VALUE'] def set_expected_value(value): pake.export('SYNC_OUTPUT_EXPECTED_VALUE', value) def del_expected_value(): if 'SYNC_OUTPUT_EXPECTED_VALUE' in pake.EXPORTS: del pake.EXPORTS['SYNC_OUTPUT_EXPECTED_VALUE'] def clean_env(): pake.EXPORTS.clear() if 'PAKE_SYNC_OUTPUT' in os.environ: del os.environ['PAKE_SYNC_OUTPUT'] def assert_subpake_success(*args, msg=None): try: pake.subpake(assert_sync_output, *args, call_exit=False) except pake.SubpakeException as err: err.write_info(sys.stderr) self.fail(msg=msg) def m_init(): if call_init: pake.de_init(clear_conf=False) pake.init() # delete PAKE_SYNC_OUTPUT environmental variable, clear all pake.EXPORTS clean_env() # Test simple cases m_init() set_expected_value(True) assert_subpake_success(msg='pake.sync_output should default to true when PAKE_SYNC_OUTPUT ' 'and --sync-output not used.') m_init() set_expected_value(False) assert_subpake_success('--sync-output', False, msg='pake.sync_output did not match --sync-output False.') m_init() set_expected_value(True) assert_subpake_success('--sync-output', True, msg='pake.sync_output did not match --sync-output True.') m_init() 
set_expected_value(False) assert_subpake_success('--sync-output', 0, msg='pake.sync_output did not match --sync-output 0.') m_init() set_expected_value(True) assert_subpake_success('--sync-output', 1, msg='pake.sync_output did not match --sync-output 1.') m_init() set_init_sync_output_value(False) set_expected_value(False) assert_subpake_success(msg='pake.sync_output did not match pake.init(sync-output=False).') m_init() set_init_sync_output_value(True) set_expected_value(True) assert_subpake_success(msg='pake.sync_output did not match pake.init(sync-output=True).') m_init() set_init_sync_output_value(None) set_expected_value(True) assert_subpake_success(msg='pake.sync_output did not match pake.init(sync-output=None).' 'None == unspecified, use default of True') # Test overriding the environment with --sync-output (the command line) # ===================================================================== os.environ['PAKE_SYNC_OUTPUT'] = '0' m_init() del_init_sync_output_value() set_expected_value(True) assert_subpake_success('--sync-output', True, msg='pake.sync_output --sync-output True should override the environmental ' 'variable PAKE_SYNC_OUTPUT=0.') # Test override False ENV from command line with --sync-output 1 os.environ['PAKE_SYNC_OUTPUT'] = '0' m_init() del_init_sync_output_value() set_expected_value(True) assert_subpake_success('--sync-output', 1, msg='pake.sync_output --sync-output 1 should override the environmental ' 'variable PAKE_SYNC_OUTPUT=0.') # Test override True ENV from command line with --sync-output False os.environ['PAKE_SYNC_OUTPUT'] = '1' m_init() del_init_sync_output_value() set_expected_value(False) assert_subpake_success('--sync-output', False, msg='pake.sync_output --sync-output False should override the environmental ' 'variable PAKE_SYNC_OUTPUT=1.') # Test override True ENV from command line with --sync-output 0 os.environ['PAKE_SYNC_OUTPUT'] = '1' m_init() del_init_sync_output_value() set_expected_value(False) 
assert_subpake_success('--sync-output', 0, msg='pake.sync_output --sync-output 0 should override the environmental ' 'variable PAKE_SYNC_OUTPUT=1.') clean_env() # Test overriding the command line from pake.init # =============================================== # Test override command line --sync-output True with pake.init(sync_output=False) m_init() set_init_sync_output_value(False) # set whats passed to pake.init set_expected_value(False) # it should win against everything assert_subpake_success('--sync-output', True, msg='setting pake.init(sync_output=...) should override --sync-output and the ' 'environmental variable PAKE_SYNC_OUTPUT.') # Test override command line --sync-output False with pake.init(sync_output=True) m_init() set_init_sync_output_value(True) # set whats passed to pake.init set_expected_value(True) # it should win against everything assert_subpake_success('--sync-output', False, msg='setting pake.init(sync_output=...) should override --sync-output and the ' 'environmental variable PAKE_SYNC_OUTPUT.') # Test override command line --sync-output 1 with pake.init(sync_output=False) m_init() set_init_sync_output_value(False) # set whats passed to pake.init set_expected_value(False) # it should win against everything assert_subpake_success('--sync-output', 1, msg='setting pake.init(sync_output=...) should override --sync-output and the ' 'environmental variable PAKE_SYNC_OUTPUT.') # Test override command line --sync-output 0 with pake.init(sync_output=True) m_init() set_init_sync_output_value(True) # set whats passed to pake.init set_expected_value(True) # it should win against everything assert_subpake_success('--sync-output', 0, msg='setting pake.init(sync_output=...) 
should override --sync-output and the ' 'environmental variable PAKE_SYNC_OUTPUT.') # Test that --sync-output True is not overridden when pake.init(sync_output=None) m_init() set_init_sync_output_value(None) # set whats passed to pake.init, None is the same as not specifying set_expected_value(True) # it should win against everything assert_subpake_success('--sync-output', True, msg='setting pake.init(sync_output=None) should NOT override --sync-output or the ' 'environmental variable PAKE_SYNC_OUTPUT.') # Test that --sync-output False is not overridden when pake.init(sync_output=None) m_init() set_init_sync_output_value(None) # set whats passed to pake.init, None is the same as not specifying set_expected_value(False) # it should win against everything assert_subpake_success('--sync-output', False, msg='setting pake.init(sync_output=None) should NOT override --sync-output or the ' 'environmental variable PAKE_SYNC_OUTPUT.') # Test that --sync-output 1 is not overridden when pake.init(sync_output=None) m_init() set_init_sync_output_value(None) # set whats passed to pake.init, None is the same as not specifying set_expected_value(True) # it should win against everything assert_subpake_success('--sync-output', 1, msg='setting pake.init(sync_output=None) should NOT override --sync-output or the ' 'environmental variable PAKE_SYNC_OUTPUT.') # Test that --sync-output 0 is not overridden when pake.init(sync_output=None) m_init() set_init_sync_output_value(None) # set whats passed to pake.init, None is the same as not specifying set_expected_value(False) # it should win against everything assert_subpake_success('--sync-output', 0, msg='setting pake.init(sync_output=None) should NOT override --sync-output or the ' 'environmental variable PAKE_SYNC_OUTPUT.') clean_env() # Test overriding the environment with pake.init # ============================================== # Test override False ENV from command line with pake.init(sync_output=True) os.environ['PAKE_SYNC_OUTPUT'] = 
'0' m_init() set_init_sync_output_value(True) set_expected_value(True) assert_subpake_success(msg='pake.sync_output pake.init(sync_output=True) should ' 'override the environmental variable PAKE_SYNC_OUTPUT=0.') # Test override True ENV from command line with pake.init(sync_output=False) os.environ['PAKE_SYNC_OUTPUT'] = '1' m_init() set_init_sync_output_value(False) set_expected_value(False) assert_subpake_success(msg='pake.sync_output pake.init(sync_output=False) should ' 'override the environmental variable PAKE_SYNC_OUTPUT=1.') # Test that PAKE_SYNC_OUTPUT=1 is not overridden when pake.init(sync_output=None) os.environ['PAKE_SYNC_OUTPUT'] = '1' m_init() set_init_sync_output_value(None) set_expected_value(True) assert_subpake_success(msg='pake.sync_output pake.init(sync_output=None) should ' 'NOT override the environmental variable PAKE_SYNC_OUTPUT=1.') # Test that PAKE_SYNC_OUTPUT=0 is not overridden when pake.init(sync_output=None) os.environ['PAKE_SYNC_OUTPUT'] = '0' m_init() set_init_sync_output_value(None) set_expected_value(False) assert_subpake_success(msg='pake.sync_output pake.init(sync_output=None) should ' 'NOT override the environmental variable PAKE_SYNC_OUTPUT=0.') # Test that pake.init overrides both the environment and the command line # ======================================================================= os.environ['PAKE_SYNC_OUTPUT'] = '0' m_init() set_init_sync_output_value(True) set_expected_value(True) assert_subpake_success('--sync-output', 0, msg='pake.sync_output pake.init(sync_output=True) should override ' 'both --sync-output 0 and PAKE_SYNC_OUTPUT=0.') # Test override False ENV from command line with --sync-output 1 os.environ['PAKE_SYNC_OUTPUT'] = '0' m_init() set_init_sync_output_value(True) set_expected_value(True) assert_subpake_success('--sync-output', False, msg='pake.sync_output pake.init(sync_output=True) should override ' 'both --sync-output False and PAKE_SYNC_OUTPUT=0.') # Test override True ENV from command line with 
--sync-output False os.environ['PAKE_SYNC_OUTPUT'] = '1' m_init() set_init_sync_output_value(False) set_expected_value(False) assert_subpake_success('--sync-output', True, msg='pake.sync_output pake.init(sync_output=False) should override ' 'both --sync-output True and PAKE_SYNC_OUTPUT=1.') # Test override True ENV from command line with --sync-output 0 os.environ['PAKE_SYNC_OUTPUT'] = '1' m_init() set_init_sync_output_value(False) set_expected_value(False) assert_subpake_success('--sync-output', 1, msg='pake.sync_output pake.init(sync_output=False) should override ' 'both --sync-output 1 and PAKE_SYNC_OUTPUT=1.') clean_env() # Few mix matched tests with all three methods # of specifying sync_output present at the same time # ================================================== os.environ['PAKE_SYNC_OUTPUT'] = '1' m_init() set_init_sync_output_value(False) set_expected_value(False) assert_subpake_success('--sync-output', 0, msg='pake.sync_output pake.init(sync_output=False) should override ' 'both --sync-output 0 and PAKE_SYNC_OUTPUT=1.') os.environ['PAKE_SYNC_OUTPUT'] = '0' m_init() set_init_sync_output_value(False) set_expected_value(False) assert_subpake_success('--sync-output', 1, msg='pake.sync_output pake.init(sync_output=False) should override ' 'both --sync-output 1 and PAKE_SYNC_OUTPUT=0.') os.environ['PAKE_SYNC_OUTPUT'] = '0' m_init() set_init_sync_output_value(True) set_expected_value(True) assert_subpake_success('--sync-output', 1, msg='pake.sync_output pake.init(sync_output=True) should override ' 'both --sync-output 1 and PAKE_SYNC_OUTPUT=0.') os.environ['PAKE_SYNC_OUTPUT'] = '1' m_init() set_init_sync_output_value(True) set_expected_value(True) assert_subpake_success('--sync-output', 0, msg='pake.sync_output pake.init(sync_output=False) should override ' 'both --sync-output 0 and PAKE_SYNC_OUTPUT=1.') os.environ['PAKE_SYNC_OUTPUT'] = '1' m_init() set_init_sync_output_value(None) # pake.init(sync_output=None) set_expected_value(False) # Because the 
command line option overrides the environment assert_subpake_success('--sync-output', 0, msg='pake.sync_output pake.init(sync_output=False) should override ' 'both --sync-output 0 and PAKE_SYNC_OUTPUT=1.') os.environ['PAKE_SYNC_OUTPUT'] = '0' m_init() set_init_sync_output_value(None) # pake.init(sync_output=None) set_expected_value(True) # Because the command line option overrides the environment assert_subpake_success('--sync-output', 1, msg='pake.sync_output pake.init(sync_output=False) should override ' 'both --sync-output 0 and PAKE_SYNC_OUTPUT=1.') clean_env()
def test_aggregate_exception(self):
    """Verify that ctx.multitask aggregates exceptions raised by its jobs.

    Both Executor.submit and Executor.map are exercised, serially
    (jobs=1) and in parallel (jobs=10).  Each raised exception carries
    a unique id so the test can also verify that no exception appears
    in the aggregate more than once.
    """
    pake.de_init(clear_conf=False)
    pk = pake.init()

    class TestException(Exception):
        # Carries a unique id so aggregated exceptions can be checked
        # for duplicates.
        def __init__(self, exc_id):
            super().__init__(exc_id)
            self.exc_id = exc_id

    def raise_test(exc_id):
        raise TestException(exc_id)

    # Expected number of exceptions raised by each task below.
    test_submit_exc_count = 5
    test_map_exc_count = 7

    @pk.task
    def test_submit(ctx):
        # Throw some extra (non-raising) tasks in with the raising ones
        with ctx.multitask() as mt:
            mt.aggregate_exceptions = True
            for i in range(0, 10):
                if i < test_submit_exc_count:
                    mt.submit(raise_test, i)
                else:
                    mt.submit(lambda: None)

    @pk.task
    def test_map(ctx):
        # Test that the map function of the executor
        # aggregates exceptions. It is just using .submit
        # under the hood so it should be fine, test anyway
        with ctx.multitask(aggregate_exceptions=True) as mt:
            arguments = range(0, 18)

            def should_raise(argument):
                if argument < test_map_exc_count:
                    raise_test(argument)

            mt.map(should_raise, arguments)

    # Assert that the correct amount of exceptions
    # were raised, and that their ID's were unique

    def all_unique(x):
        seen = set()
        return not any(i in seen or seen.add(i) for i in x)

    def assert_exception_count(task, count, jobs):
        # Run *task* and assert that a pake.TaskException is raised whose
        # cause is a pake.AggregateException holding exactly *count*
        # unique TestException instances.
        task_name = pk.get_task_name(task)
        try:
            pk.run(tasks=task, jobs=jobs)
        except pake.TaskException as err:
            if isinstance(err.exception, pake.AggregateException):
                aggregate = err.exception

                self.assertEqual(
                    len(aggregate.exceptions), count,
                    msg='test_multitask_exceptions.py: Task Name: "{}", Expected {} '
                        'exceptions to have been aggregated, got: {}'.format(
                            task_name, count, len(aggregate.exceptions)))

                self.assertTrue(
                    all_unique(i.exc_id for i in aggregate.exceptions),
                    msg='test_multitask_exceptions.py: Task Name: "{}", aggregate '
                        'exception contained the same exception more than once.'
                        .format(task_name))

                # test for exceptions during serialization of the info
                aggregate.write_info()
            else:
                self.fail(
                    msg='test_multitask_exceptions.py: Task Name: "{}", Expected a '
                        'pake.AggregateException to be raised, got: {}'.format(
                            task_name, err.exception))
        else:
            self.fail(
                msg='test_multitask_exceptions.py: Task Name: "{}", Expected '
                    'pake.AggregateException to be raised and cause a pake.TaskException'
                    .format(task_name))

    # Use the named constants rather than repeating the magic numbers
    # 5 and 7, so the task definitions and the expectations cannot
    # drift apart.
    assert_exception_count(test_submit, test_submit_exc_count, jobs=1)
    assert_exception_count(test_map, test_map_exc_count, jobs=1)
    assert_exception_count(test_submit, test_submit_exc_count, jobs=10)
    assert_exception_count(test_map, test_map_exc_count, jobs=10)
def _existing_files_test(self, jobs):
    """Test file change detection when the output files already exist.

    Each case ages every input and output file to epoch time, then
    freshens a single input, and asserts the task is considered out of
    date and runs.  The first case uses a single output file, the
    second multiple outputs.

    :param jobs: job count passed through to ``pk.run``.
    """
    in1 = os.path.join(script_dir, 'test_data', 'in1')
    in2 = os.path.join(script_dir, 'test_data', 'in2')
    out1 = os.path.join(script_dir, 'test_data', 'out1')
    out2 = os.path.join(script_dir, 'test_data', 'out2')

    def run_case(touch_input, outputs):
        # One scenario: re-initialize pake, make all modification times
        # ancient, make one input recent, then verify the task runs
        # because that input is newer than the output(s).
        pake.de_init(clear_conf=False)
        pk = pake.init()

        ran = False

        # Make all the modification times ancient
        for f in (in1, in2, out1, out2):
            os.utime(f, (0, 0))

        # Make an input recent
        pake.FileHelper().touch(touch_input)

        # This should run
        @pk.task(i=[in1, in2], o=outputs)
        def task_a(ctx):
            nonlocal ran
            ran = True

        pk.run(tasks=task_a, jobs=jobs)
        self.assertTrue(ran)

    # Single output file
    run_case(in1, out1)

    # Same test as above, except with multiple outputs
    run_case(in2, [out1, out2])
def test_show_header(self):
    # This test verifies that the pake.Pake.show_task_headers option
    # and the show_header parameter of pk.add_task and pk.task
    # are working correctly in conjunction with each other

    with tempfile.TemporaryFile(mode='w+') as pk_stdout:

        def run_scenario(disable_headers, show_header, expect_output, msg):
            # Run one task under a fresh pake instance whose stdout is
            # pk_stdout, then check whether anything (the task header)
            # was written to it.
            #
            # disable_headers: True -> set pk.show_task_headers = False;
            #                  False -> assert the default is True.
            # show_header:     value for @pk.task(show_header=...),
            #                  None means "use the task default".
            # expect_output:   whether a header should have been written.
            pake.de_init(clear_conf=False)
            pk = pake.init(stdout=pk_stdout)

            if disable_headers:
                pk.show_task_headers = False
            else:
                self.assertTrue(pk.show_task_headers)

            if show_header is None:
                @pk.task
                def test_task(ctx):
                    pass
            else:
                @pk.task(show_header=show_header)
                def test_task(ctx):
                    pass

            pake.run(pk, tasks=test_task)

            # tell() > 0 means the header was written since the last
            # seek(0); rewind for the next scenario.
            if expect_output:
                self.assertGreater(pk_stdout.tell(), 0, msg=msg)
            else:
                self.assertEqual(pk_stdout.tell(), 0, msg=msg)

            pk_stdout.seek(0)

        # Default show_header with headers globally enabled -> header written
        run_scenario(
            disable_headers=False, show_header=None, expect_output=True,
            msg='Task with show_header=None (default value) did not write '
                'a header when pk.show_task_headers=True.')

        # show_header=False suppresses the header even when globally enabled
        run_scenario(
            disable_headers=False, show_header=False, expect_output=False,
            msg='Task with show_header=False wrote a header to pakes output '
                'when pk.show_task_headers=True.')

        # Default show_header with headers globally disabled -> nothing written
        run_scenario(
            disable_headers=True, show_header=None, expect_output=False,
            msg='Task with show_header=None (default value) wrote a header '
                'to pakes output when pk.show_task_headers=False.')

        # show_header=True forces the header even when globally disabled
        run_scenario(
            disable_headers=True, show_header=True, expect_output=True,
            msg='Task with show_header=True did not write header to pakes '
                'output when pk.show_task_headers=False.')
def test_run_helper(dry_run=False):
    # Exercise each pake.run() return code in sequence against a fresh
    # pake instance, optionally under --dry-run.  Cases that require a
    # task to actually execute (task exceptions, subprocess failures)
    # are skipped in dry-run mode, where tasks are not run.
    nonlocal self
    pake.de_init(clear_conf=False)
    if dry_run:
        pk = pake.init(args=['--dry-run'])
    else:
        pk = pake.init()

    # No tasks defined
    self.assertEqual(pake.run(pk, call_exit=False),
                     returncodes.NO_TASKS_DEFINED)

    @pk.task
    def task_one():
        raise Exception()

    # No tasks specified
    self.assertEqual(pake.run(pk, call_exit=False),
                     returncodes.NO_TASKS_SPECIFIED)

    # Undefined task
    self.assertEqual(pake.run(pk, tasks='undefined', call_exit=False),
                     returncodes.UNDEFINED_TASK)

    if not dry_run:
        # Exception in task (only when the task body actually executes)
        self.assertEqual(
            pake.run(pk, tasks='task_one', call_exit=False),
            returncodes.TASK_EXCEPTION)

    @pk.task(i='IDontExist.nope', o='nada')
    def task_two():
        pass

    # Input file not found
    self.assertEqual(pake.run(pk, tasks='task_two', call_exit=False),
                     returncodes.TASK_INPUT_NOT_FOUND)

    @pk.task(i='IDontExist.nope')
    def task_three():
        pass

    # Missing output file (inputs declared without outputs)
    self.assertEqual(pake.run(pk, tasks='task_three', call_exit=False),
                     returncodes.TASK_OUTPUT_MISSING)

    # ======== Cover Subpake and Call exception propagation

    @pk.task
    def task_four(ctx):
        ctx.subpake(os.path.join(script_dir, 'throw.py'))

    @pk.task
    def task_five(ctx):
        # execute with the current interpreter
        ctx.call(sys.executable, os.path.join(script_dir, 'throw.py'))

    if not dry_run:
        # Because 'throw.py' runs but throws an exception
        self.assertEqual(
            pake.run(pk, tasks='task_four', call_exit=False),
            returncodes.SUBPAKE_EXCEPTION)

        # Same thing, except differentiate as a task subprocess exception
        self.assertEqual(
            pake.run(pk, tasks=task_five, call_exit=False),
            returncodes.TASK_SUBPROCESS_EXCEPTION)
def test_run(self):
    # Covers the basic pake.run() happy path (task counting, argument
    # parsing state, jobs validation), then walks through every error
    # return code via test_run_helper, both normally and under --dry-run.
    pake.de_init(clear_conf=False)
    pk = pake.init()

    # should still be parsed and the object available, even with no arguments passed
    self.assertTrue(pake.arguments.args_are_parsed())

    run_count = 0

    @pk.task
    def task_one(ctx):
        nonlocal run_count
        run_count += 1

    @pk.task
    def task_two(ctx):
        nonlocal run_count
        run_count += 1

    self.assertEqual(pk.task_count, 2)

    # Tasks may be specified by reference or by name interchangeably.
    pake.run(pk, tasks=[task_one, 'task_two'])

    # Running must not alter the number of registered tasks.
    self.assertEqual(pk.task_count, 2)
    # Every registered task ran exactly once.
    self.assertEqual(pk.run_count, run_count)

    with self.assertRaises(ValueError):
        # ValueError because jobs < 1 is invalid
        pake.run(pk, tasks=[task_one, 'task_two'], jobs=0)

    with self.assertRaises(ValueError):
        # ValueError because jobs < 1 is invalid
        pake.run(pk, tasks=[task_one, 'task_two'], jobs=-1)

    # ===========

    def test_run_helper(dry_run=False):
        # Exercise each pake.run() return code in sequence against a
        # fresh pake instance, optionally under --dry-run.  Cases that
        # require a task to actually execute are skipped in dry-run
        # mode, where tasks are not run.
        nonlocal self
        pake.de_init(clear_conf=False)
        if dry_run:
            pk = pake.init(args=['--dry-run'])
        else:
            pk = pake.init()

        # No tasks defined
        self.assertEqual(pake.run(pk, call_exit=False),
                         returncodes.NO_TASKS_DEFINED)

        @pk.task
        def task_one():
            raise Exception()

        # No tasks specified
        self.assertEqual(pake.run(pk, call_exit=False),
                         returncodes.NO_TASKS_SPECIFIED)

        # Undefined task
        self.assertEqual(pake.run(pk, tasks='undefined', call_exit=False),
                         returncodes.UNDEFINED_TASK)

        if not dry_run:
            # Exception in task (only when the task body actually executes)
            self.assertEqual(
                pake.run(pk, tasks='task_one', call_exit=False),
                returncodes.TASK_EXCEPTION)

        @pk.task(i='IDontExist.nope', o='nada')
        def task_two():
            pass

        # Input file not found
        self.assertEqual(pake.run(pk, tasks='task_two', call_exit=False),
                         returncodes.TASK_INPUT_NOT_FOUND)

        @pk.task(i='IDontExist.nope')
        def task_three():
            pass

        # Missing output file (inputs declared without outputs)
        self.assertEqual(pake.run(pk, tasks='task_three', call_exit=False),
                         returncodes.TASK_OUTPUT_MISSING)

        # ======== Cover Subpake and Call exception propagation

        @pk.task
        def task_four(ctx):
            ctx.subpake(os.path.join(script_dir, 'throw.py'))

        @pk.task
        def task_five(ctx):
            # execute with the current interpreter
            ctx.call(sys.executable, os.path.join(script_dir, 'throw.py'))

        if not dry_run:
            # Because 'throw.py' runs but throws an exception
            self.assertEqual(
                pake.run(pk, tasks='task_four', call_exit=False),
                returncodes.SUBPAKE_EXCEPTION)

            # Same thing, except differentiate as a task subprocess exception
            self.assertEqual(
                pake.run(pk, tasks=task_five, call_exit=False),
                returncodes.TASK_SUBPROCESS_EXCEPTION)

    # Run the whole return-code walk twice: normal, then dry-run.
    test_run_helper()
    test_run_helper(True)