def test_big_return_data(self):
    print("Testing big return values")
    wrapped_function = pynisher.enforce_limits()(return_big_array)

    for num_elements in [4, 16, 64, 256, 1024, 4096, 16384, 65536, 262144]:
        bla = wrapped_function(num_elements)
        self.assertEqual(len(bla), num_elements)
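# The helper `return_big_array` is referenced above but not defined in this section.
# A minimal sketch consistent with how the test calls it (the body is an assumption,
# not the original helper): it only needs to produce a return value whose size grows
# with `num_elements`, so that a large result has to be pickled back from the subprocess.
def return_big_array(num_elements):
    # Build a list of the requested length; the content does not matter.
    return [1] * num_elements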
def test_crash_unexpectedly(self):
    print("Testing an unexpected signal simulating a crash.")
    wrapped_function = pynisher.enforce_limits(cpu_time_in_s=5)(crash_unexpectedly)
    self.assertIsNone(wrapped_function(signal.SIGQUIT))
    self.assertEqual(wrapped_function.exit_status, pynisher.AnythingException)
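# `crash_unexpectedly` is not shown here. A plausible sketch, assuming the helper simply
# delivers the given signal to the child process so that it dies without a proper return
# value (the exact signal handling is an assumption):
import os
import signal
import time


def crash_unexpectedly(signum):
    # Restore the default handler and send the signal to ourselves, simulating a hard crash.
    signal.signal(signum, signal.SIG_DFL)
    os.kill(os.getpid(), signum)
    time.sleep(10)  # never reached if the signal terminates the process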
def test_kill_subprocesses(self):
    wrapped_function = pynisher.enforce_limits(wall_time_in_s=1)(spawn_rogue_subprocess)
    wrapped_function(5)

    time.sleep(1)
    p = psutil.Process()
    self.assertEqual(len(p.children(recursive=True)), 0)
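# `spawn_rogue_subprocess` is not defined in this section. A sketch of what such a helper
# could look like (names and sleep durations are assumptions): it starts `n` long-running
# child processes and then blocks, so the test can verify that pynisher cleans up the whole
# process tree once the wall clock limit is hit.
import subprocess
import time


def spawn_rogue_subprocess(n):
    for _ in range(n):
        # Each child just sleeps so it would outlive the wrapped function if not killed.
        subprocess.Popen("sleep 30", shell=True)
    time.sleep(30)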
def start_computation(self, job: EvaluationJob) -> Result:
    result = None
    try:
        process_logger = ProcessLogger(self.workdir, job.cid)
        wrapper = pynisher.enforce_limits(wall_time_in_s=job.cutoff,
                                          grace_period_in_s=5,
                                          logger=self.logger)(self.compute)
        c = wrapper(job.ds, job.cid, job.config, self.cfg_cache, job.cfg_keys,
                    job.component, process_logger)

        if wrapper.exit_status is pynisher.TimeoutException:
            status = StatusType.TIMEOUT
            cost = util.worst_score(job.ds.metric)
        elif wrapper.exit_status is pynisher.MemorylimitException:
            status = StatusType.MEMOUT
            cost = util.worst_score(job.ds.metric)
        elif wrapper.exit_status == 0 and c is not None:
            status = StatusType.SUCCESS
            cost = c
        else:
            status = StatusType.CRASHED
            self.logger.debug(f'Worker failed with {c[0] if isinstance(c, Tuple) else c}')
            cost = util.worst_score(job.ds.metric)

        runtime = Runtime(wrapper.wall_clock_time,
                          timestamp=timeit.default_timer() - self.start_time)

        if job.config is None:
            config, partial_configs = process_logger.restore_config(job.component)
        else:
            config = job.config
            partial_configs = None

        # job.component always has to be a FlexiblePipeline
        steps = [(name, comp.name()) for name, comp in job.component.steps]

        result = Result(job.cid, status, config, cost[0], cost[1], runtime, partial_configs)
    except KeyboardInterrupt:
        raise
    except Exception as ex:
        # Should never occur, just a safety net
        self.logger.exception(f'Unexpected error during computation: \'{ex}\'')
        # noinspection PyUnboundLocalVariable
        result = Result(job.cid, StatusType.CRASHED,
                        config if 'config' in locals() else job.config,
                        util.worst_score(job.ds.metric)[0],
                        util.worst_score(job.ds.metric)[1],
                        None,
                        partial_configs if 'partial_configs' in locals() else None)
    return result
def test_high_cpu_percentage(self):
    print("Testing cpu time constraint.")
    cpu_time_in_s = 2
    grace_period = 1
    wrapped_function = pynisher.enforce_limits(cpu_time_in_s=cpu_time_in_s,
                                               grace_period_in_s=grace_period)(cpu_usage)

    self.assertEqual(None, wrapped_function())
    self.assertEqual(wrapped_function.exit_status, pynisher.CpuTimeoutException)
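# `cpu_usage` is referenced but not defined here. A minimal sketch (an assumption about the
# original helper): a busy loop that burns CPU time without ever returning, so only the
# cpu_time_in_s limit can stop it.
def cpu_usage():
    while True:
        pass  # spin and consume CPU until the CPU time limit is enforced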
def calculate(
        X: np.ndarray,
        y: np.ndarray,
        max_nan_percentage: float = 0.9,
        max_features: int = 10000,
        random_state: int = 42,
        timeout: int = 30
) -> Tuple[Optional[MetaFeaturesDict], Optional[MetaFeatures]]:
    """
    Calculates the meta-features for the given data set. The actual computation is dispatched
    to another process to prevent crashes due to excessive memory usage.
    :param X:
    :param y:
    :param max_nan_percentage:
    :param max_features:
    :param random_state:
    :param timeout:
    :return:
    """
    MetaFeatureFactory.logger.debug('Calculating MF')

    wrapper = pynisher.enforce_limits(wall_time_in_s=timeout, grace_period_in_s=5,
                                      logger=MetaFeatureFactory.logger)(MetaFeatureFactory._calculate)
    res = wrapper(X, y, max_nan_percentage=max_nan_percentage, max_features=max_features,
                  random_state=random_state)

    # TODO improve error handling
    if wrapper.exit_status is pynisher.TimeoutException or wrapper.exit_status is pynisher.MemorylimitException:
        MetaFeatureFactory.logger.warning('Failed to extract MF due to resource constraints')
        return None, None
    elif wrapper.exit_status is pynisher.AnythingException and isinstance(res, Tuple):
        MetaFeatureFactory.logger.warning(f'Failed to extract MF due to {res[0]}')
        return None, None
    elif wrapper.exit_status == 0 and res is not None:
        array = np.atleast_2d(np.fromiter(res.values(), dtype=float))
        if np.isnan(array).any():
            MetaFeatureFactory.logger.warning(f'MF are partially NaN: {res}')
            return None, None
        return res, array
    else:
        # Last resort...
        MetaFeatureFactory.logger.warning('Failed to extract MF due to unknown reasons')
        return None, None
def test_capture_output(self):
    print("Testing capturing of output.")
    global logger

    time_limit = 2
    grace_period = 1

    def print_and_sleep(t):
        for i in range(t):
            print(i)
            time.sleep(1)

    wrapped_function = pynisher.enforce_limits(wall_time_in_s=time_limit, mem_in_mb=None,
                                               grace_period_in_s=grace_period, logger=logger,
                                               capture_output=True)(print_and_sleep)
    wrapped_function(5)

    self.assertTrue('0' in wrapped_function.stdout)
    self.assertTrue(wrapped_function.stderr == '')

    def print_and_fail():
        print(0)
        raise RuntimeError()

    wrapped_function = pynisher.enforce_limits(wall_time_in_s=time_limit, mem_in_mb=None,
                                               grace_period_in_s=grace_period, logger=logger,
                                               capture_output=True)(print_and_fail)
    wrapped_function()

    self.assertTrue('0' in wrapped_function.stdout)
    self.assertEqual(RuntimeError, type(wrapped_function.result[0]))
def test_busy_in_C_library(self):
    global logger
    wrapped_function = pynisher.enforce_limits(wall_time_in_s=2)(svm_example)

    start = time.time()
    wrapped_function(16384, 128)
    duration = time.time() - start

    time.sleep(1)
    p = psutil.Process()
    self.assertEqual(len(p.children(recursive=True)), 0)
    self.assertTrue(duration < 2.1)
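# `svm_example` is not part of this section. A sketch under the assumption that it fits a
# kernel SVM with scikit-learn, which keeps the subprocess busy inside compiled (libsvm) code
# where Python-level interruption is not possible; the dataset shape and model choice are
# assumptions, not the original helper.
import numpy as np
from sklearn.svm import SVR


def svm_example(n_samples, n_features):
    rng = np.random.RandomState(0)
    X = rng.rand(n_samples, n_features)
    y = rng.rand(n_samples)
    SVR().fit(X, y)  # long-running fit that spends its time in compiled code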
def nested_pynisher(level=2, cputime=5, walltime=5, memlimit=10e24, increment=-1, grace_period=1):
    print(f"this is level {level}")
    if level == 0:
        spawn_rogue_subprocess(10)
    else:
        func = pynisher.enforce_limits(mem_in_mb=memlimit, cpu_time_in_s=cputime,
                                       wall_time_in_s=walltime,
                                       grace_period_in_s=grace_period)(nested_pynisher)
        func(level - 1, None, walltime + increment, memlimit, increment)
def test_time_out(self):
    print("Testing wall clock time constraint.")
    local_mem_in_mb = None
    local_wall_time_in_s = 1
    local_cpu_time_in_s = None
    local_grace_period = None

    wrapped_function = pynisher.enforce_limits(mem_in_mb=local_mem_in_mb,
                                               wall_time_in_s=local_wall_time_in_s,
                                               cpu_time_in_s=local_cpu_time_in_s,
                                               grace_period_in_s=local_grace_period)(simulate_work)

    for mem in range(1, 10):
        self.assertIsNone(wrapped_function(mem, 10, 0))
        self.assertEqual(wrapped_function.exit_status, pynisher.TimeoutException)
def test_out_of_memory(self):
    print("Testing memory constraint.")
    local_mem_in_mb = 32
    local_wall_time_in_s = None
    local_cpu_time_in_s = None
    local_grace_period = None

    wrapped_function = pynisher.enforce_limits(mem_in_mb=local_mem_in_mb,
                                               wall_time_in_s=local_wall_time_in_s,
                                               cpu_time_in_s=local_cpu_time_in_s,
                                               grace_period_in_s=local_grace_period)(simulate_work)

    for mem in [1024, 2048, 4096]:
        self.assertIsNone(wrapped_function(mem, 0, 0))
        self.assertEqual(wrapped_function.exit_status, pynisher.MemorylimitException)
def test_success(self):
    print("Testing unbounded function calls which have to run through!")
    local_mem_in_mb = None
    local_wall_time_in_s = None
    local_cpu_time_in_s = None
    local_grace_period = None

    wrapped_function = pynisher.enforce_limits(mem_in_mb=local_mem_in_mb,
                                               wall_time_in_s=local_wall_time_in_s,
                                               cpu_time_in_s=local_cpu_time_in_s,
                                               grace_period_in_s=local_grace_period)(simulate_work)

    for mem in [1, 2, 4, 8, 16]:
        self.assertEqual((mem, 0, 0), wrapped_function(mem, 0, 0))
        self.assertEqual(wrapped_function.exit_status, 0)
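# The three tests above all wrap `simulate_work`, which is not shown in this section. A sketch
# consistent with how it is called and with the value `(mem, 0, 0)` expected back in
# test_success (argument names and the allocation strategy are assumptions; spawning
# `num_processes` helper processes is omitted here):
import time


def simulate_work(size_in_mb, wall_time_in_s, num_processes):
    # Allocate roughly `size_in_mb` megabytes, sleep for `wall_time_in_s` seconds,
    # and return the arguments so the caller can check the round trip.
    buffer = bytearray(size_in_mb * 1024 * 1024)
    time.sleep(wall_time_in_s)
    return size_in_mb, wall_time_in_s, num_processes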
def start_transform_dataset(self, job: EvaluationJob) -> Result:
    self.logger.info(f'start transforming job {job.cid}')
    X = None
    try:
        wrapper = pynisher.enforce_limits(wall_time_in_s=job.cutoff,
                                          grace_period_in_s=5,
                                          logger=self.logger)(self.transform_dataset)
        c = wrapper(job.ds, job.cid, job.component, job.config)

        if wrapper.exit_status is pynisher.TimeoutException:
            status = StatusType.TIMEOUT
            score = util.worst_score(job.ds.metric)
        elif wrapper.exit_status is pynisher.MemorylimitException:
            status = StatusType.MEMOUT
            score = util.worst_score(job.ds.metric)
        elif wrapper.exit_status == 0 and c is not None:
            status = StatusType.SUCCESS
            X, score = c
        else:
            status = StatusType.CRASHED
            self.logger.debug(f'Worker failed with {c[0] if isinstance(c, Tuple) else c}')
            score = util.worst_score(job.ds.metric)

        result = Result(job.cid, status=status, loss=score[0], structure_loss=score[1],
                        transformed_X=X,
                        runtime=Runtime(wrapper.wall_clock_time,
                                        timeit.default_timer() - self.start_time))
    except KeyboardInterrupt:
        raise
    except Exception as ex:
        # Should never occur, just a safety net
        self.logger.exception(f'Unexpected error during computation: \'{ex}\'')
        result = Result(job.cid, status=StatusType.CRASHED,
                        loss=util.worst_score(job.ds.metric)[0],
                        structure_loss=util.worst_score(job.ds.metric)[1])
    return result
def test_liblinear_svc(self):
    global logger

    time_limit = 2
    grace_period = 1

    wrapped_function = pynisher.enforce_limits(cpu_time_in_s=time_limit, mem_in_mb=None,
                                               grace_period_in_s=grace_period,
                                               logger=logger)(svc_example)
    start = time.time()
    wrapped_function(16384, 1000)
    duration = time.time() - start

    time.sleep(1)
    p = psutil.Process()
    self.assertEqual(len(p.children(recursive=True)), 0)

    # fails with pynisher.AnythingException for some reason
    # self.assertTrue(wrapped_function.exit_status == pynisher.CpuTimeoutException)
    self.assertTrue(duration > time_limit - 0.1)
    self.assertTrue(duration < time_limit + grace_period + 0.1)
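# `svc_example` is likewise not shown. A sketch under the assumption that it trains a
# liblinear-backed classifier with scikit-learn (data generation and hyperparameters are
# assumptions); a large `max_iter` and tight tolerance keep the fit busy in compiled code
# well past the CPU time limit.
import numpy as np
from sklearn.svm import LinearSVC


def svc_example(n_samples, n_features):
    rng = np.random.RandomState(0)
    X = rng.rand(n_samples, n_features)
    y = rng.randint(0, 2, size=n_samples)
    LinearSVC(max_iter=1_000_000, tol=1e-12).fit(X, y)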