def test_interruptible_can_run_function(self):
    """A short function runs to completion and leaves no stray threads behind."""
    threads_before = threading.active_count()

    def quick_task(duration):
        sleep(duration)
        return 'OK'

    outcome = interrupt.interruptible(quick_task, args=(0.01,), timeout=10)
    assert outcome == (True, 'OK')

    # the worker thread must have been cleaned up
    threads_after = threading.active_count()
    assert threads_before == threads_after
def test_interruptible_stops_execution_on_exception(self):
    """An exception raised by the wrapped function is surfaced as an error string."""
    threads_before = threading.active_count()

    def crashing_task():
        raise Exception('I have to crash. Now!')

    message, _ = interrupt.interruptible(crashing_task, timeout=1.0)
    assert 'ERROR: Unknown error:' in message
    assert 'I have to crash. Now!' in message

    # no worker thread should survive the failure
    threads_after = threading.active_count()
    assert threads_before == threads_after
def test_interruptible_stops_execution_on_timeout(self):
    """A function exceeding the timeout is interrupted and reported as a failure."""
    threads_before = threading.active_count()

    def slow_task(duration):
        for step in range(duration):
            sleep(step)
        return 'OK'

    outcome = interrupt.interruptible(slow_task, args=(20,), timeout=0.00001)
    assert outcome == (False, 'ERROR: Processing interrupted: timeout after 0 seconds.')

    # the interrupted worker thread must not linger
    threads_after = threading.active_count()
    assert threads_before == threads_after
def test_interruptible_can_run_function(self):
    """A short function completes normally: no error, 'OK' result, no leftover threads."""
    threads_before = threading.active_count()

    def quick_task(duration):
        sleep(duration)
        return 'OK'

    assert (None, 'OK') == interrupt.interruptible(quick_task, args=(0.01,), timeout=10)

    # the worker thread must have been cleaned up
    threads_after = threading.active_count()
    assert threads_before == threads_after
def test_interruptible_stops_execution_on_timeout(self):
    """A function exceeding the timeout yields an error message and no result."""
    threads_before = threading.active_count()

    def slow_task(duration):
        for step in range(duration):
            sleep(step)
        return 'OK'

    outcome = interrupt.interruptible(slow_task, args=(20,), timeout=0.00001)
    assert outcome == ('ERROR: Processing interrupted: timeout after 0 seconds.', None)

    # the interrupted worker thread must not linger
    threads_after = threading.active_count()
    assert threads_before == threads_after
def _scanit(paths, scanners, scans_cache_class, diag, timeout=DEFAULT_TIMEOUT):
    """
    Run scans on one file or directory and cache the results on disk.

    `paths` is a tuple of (absolute path, relative path). `scanners` is an
    ordered mapping of scanner name -> (enabled flag, scanner function).

    Return a tuple of (success, scanned relative path) where success is True
    on success, False on error.

    Note that this is really only a wrapper function used as an execution
    unit for parallel processing.
    """
    abs_path, rel_path = paths
    # always fetch infos and cache.
    infos = OrderedDict()
    infos['path'] = rel_path
    infos.update(scan_infos(abs_path, diag=diag))

    success = True
    scans_cache = scans_cache_class()
    is_cached = scans_cache.put_info(rel_path, infos)

    # note: "flag and function" expressions return the function if flag is True
    # note: the order of the scans matters to show things in logical order
    # Build a list (not a lazy map object): the sequence is consumed twice,
    # by zip() below and by any() after it. On Python 3 a map iterator would
    # be exhausted by zip() and any() would always be False, silently
    # skipping every scan.
    scanner_functions = [flag and func for flag, func in scanners.values()]
    scanners = OrderedDict(zip(scanners.keys(), scanner_functions))

    if any(scanner_functions):
        # Skip other scans if already cached
        # FIXME: ENSURE we only do this for files not directories
        if not is_cached:
            # run the scan as an interruptible task
            scans_runner = partial(scan_one, abs_path, scanners, diag)
            success, scan_result = interruptible(scans_runner, timeout=timeout)
            if not success:
                # Use scan errors as the scan result for that file on failure.
                # This is a top-level error not attached to a specific scanner,
                # hence the "scan_errors" key is used for these errors.
                scan_result = {'scan_errors': [scan_result]}
            scans_cache.put_scan(rel_path, infos, scan_result)
            # do not report success if some other errors happened
            if scan_result.get('scan_errors'):
                success = False

    return success, rel_path