def test_raised_exception_by_timeout(self):
    """A 10-second run limit must surface as concurrent.futures.TimeoutError."""
    with self.assertRaises(concurrent.futures.TimeoutError):
        runner = ParallelObject(
            self.rand_timeouts,
            timeout=10,
            num_workers=len(self.rand_timeouts),
        )
        runner.run(dummy_func_return_tuple)
def test_exception_raised_in_thread_by_func(self):
    """An exception thrown inside a worker thread must propagate to the caller."""
    with self.assertRaises(DummyException):
        runner = ParallelObject(
            self.rand_timeouts,
            timeout=30,
            num_workers=len(self.rand_timeouts),
        )
        runner.run(dummy_func_raising_exception)
def run_workload(self, stress_cmd, nemesis=False, sub_type=None):
    """Run the given stress command against every Scylla cluster in parallel.

    NOTE(review): ``sub_type`` is currently unused inside this method — kept
    for interface compatibility; confirm whether callers still pass it.
    """
    def _worker(cluster_stats, cmd, with_nemesis):
        # One parallel task per cluster: delegate to the cluster's own runner.
        cluster_stats.run_workload(stress_cmd=cmd, nemesis=with_nemesis)

    self.log.info("Running workload in parallel with following command:\n%s", stress_cmd)
    task_args = [[scs, stress_cmd, nemesis] for scs in self.scylla_clusters_stats]
    runner = ParallelObject(timeout=self.load_iteration_timeout_sec, objects=task_args)
    runner.run(func=_worker, unpack_objects=True, ignore_exceptions=False)
def test_raised_exception_by_timeout(self):
    """TimeoutError must fire roughly when the shortest configured timeout elapses."""
    shortest = min(self.rand_timeouts)
    started = time.time()
    with self.assertRaises(concurrent.futures.TimeoutError):
        ParallelObject(self.rand_timeouts, timeout=shortest).run(dummy_func_return_tuple)
    elapsed = int(time.time() - started)
    # Allow 1 second of slack for scheduling overhead.
    self.assertAlmostEqual(first=shortest, second=elapsed, delta=1)
def run_mixed_workload(self, nemesis: bool = False):
    """Kick off the 'mixed' workload on every Scylla cluster simultaneously."""
    def _worker(cluster_stats, with_nemesis):
        cluster_stats.run_mixed_workload(nemesis=with_nemesis)

    self.log.info("Running 'mixed' workload operation in parallel")
    task_args = [[scs, nemesis] for scs in self.scylla_clusters_stats]
    runner = ParallelObject(timeout=self.load_iteration_timeout_sec, objects=task_args)
    runner.run(func=_worker, unpack_objects=True, ignore_exceptions=False)
def run_benchmarks(self):
    """Run benchmarks on all runners in parallel; a timeout is logged, not fatal.

    Output collection happens regardless of whether the run timed out.
    """
    try:
        runner = ParallelObject(self._benchmark_runners, timeout=300)
        runner.run(lambda bench: bench.run_benchmarks(), ignore_exceptions=True)
    except TimeoutError as exc:
        # NOTE(review): this catches the builtin TimeoutError — confirm
        # ParallelObject raises it (and not concurrent.futures.TimeoutError,
        # which is a distinct class before Python 3.11).
        LOGGER.warning(
            "Run into TimeoutError during running benchmarks. Exception:\n%s", exc)
    self._collect_benchmark_output()
def install_benchmark_tools(self):
    """Install benchmark tooling on all runners in parallel; timeouts are tolerated."""
    try:
        runner = ParallelObject(self._benchmark_runners, timeout=300)
        runner.run(lambda bench: bench.install_benchmark_tools(), ignore_exceptions=True)
    except TimeoutError as exc:
        # NOTE(review): catches the builtin TimeoutError — confirm ParallelObject
        # raises it (pre-3.11 concurrent.futures.TimeoutError is a different class).
        LOGGER.warning(
            "Ran into TimeoutError while installing benchmark tools: Exception:\n%s", exc)
def test_successful_parallel_run_func_returning_single_value(self):
    """Single return values are collected in the same order as the inputs."""
    runner = ParallelObject(self.rand_timeouts, timeout=30,
                            num_workers=len(self.rand_timeouts))
    actual = [res.result for res in runner.run(dummy_func_return_single)]
    self.assertListEqual(actual, self.rand_timeouts)
def test_successful_parallel_run_func_returning_tuple(self):
    """Tuple return values are collected intact and in input order."""
    runner = ParallelObject(self.rand_timeouts, timeout=self.max_timout + 2,
                            num_workers=len(self.rand_timeouts))
    actual = [res.result for res in runner.run(dummy_func_return_tuple)]
    expected = [(timeout, 'test') for timeout in self.rand_timeouts]
    self.assertListEqual(actual, expected)
def test_successfull_parallel_run_func_accepted_list_as_parameter(self):
    """A function taking a whole list as its parameter gets each list unmodified."""
    runner = ParallelObject(self.list_as_arg, timeout=30,
                            num_workers=len(self.list_as_arg))
    actual = [res.result for res in runner.run(dummy_func_accepts_list_as_parameter)]
    expected = [item[0][1] for item in self.list_as_arg]
    self.assertListEqual(actual, expected)
def test_unpack_kwargs_for_func(self):
    """Dict objects are unpacked as keyword arguments for the target function."""
    runner = ParallelObject(self.unpacking_kwargs, timeout=30, num_workers=2)
    actual = [res.result for res in runner.run(dummy_func_with_several_parameters)]
    expected = [(kwargs["timeout"], kwargs["msg"]) for kwargs in self.unpacking_kwargs]
    self.assertListEqual(actual, expected)
def test_unpack_args_for_func(self):
    """Positional-argument objects are unpacked and echoed back as tuples."""
    parallel_object = ParallelObject(self.unpacking_args, timeout=30, num_workers=2)
    results = parallel_object.run(dummy_func_with_several_parameters)
    returned_results = [r.result for r in results]
    # tuple(args) replaces the (timeout, msg) re-packing comprehension that
    # required a pylint unnecessary-comprehension disable; it is behavior-identical
    # for the two-item argument pairs this fixture holds.
    expected_results = [tuple(args) for args in self.unpacking_args]
    self.assertListEqual(returned_results, expected_results)
def test_less_number_of_workers_than_length_of_iterable(self):
    """All items are processed even when workers are fewer than the input size."""
    runner = ParallelObject(self.rand_timeouts, timeout=30, num_workers=2)
    actual = [res.result for res in runner.run(dummy_func_return_tuple)]
    expected = [(timeout, 'test') for timeout in self.rand_timeouts]
    self.assertListEqual(actual, expected)
def test_ignore_exception_by_timeout(self):
    """With ignore_exceptions=True, timed-out tasks report exc instead of raising."""
    runner = ParallelObject(self.rand_timeouts, timeout=min(self.rand_timeouts))
    # Every successfully completed task must produce one of these tuples.
    possible_results = [(timeout, 'test') for timeout in self.rand_timeouts]
    for outcome in runner.run(dummy_func_return_tuple, ignore_exceptions=True):
        if outcome.exc:
            self.assertIsNone(outcome.result)
            self.assertIsInstance(outcome.exc, concurrent.futures.TimeoutError)
        else:
            self.assertIsNone(outcome.exc)
            self.assertIn(outcome.result, possible_results)
def test_ignore_exception_raised_in_func_and_get_results(self):
    """With ignore_exceptions=True, per-task exceptions are captured on the result object."""
    runner = ParallelObject(self.rand_timeouts, timeout=self.max_timout + 2)
    for outcome in runner.run(dummy_func_raising_exception, ignore_exceptions=True):
        self.assertIsNotNone(outcome.obj)
        if outcome.exc:
            self.assertIsNone(outcome.result)
            self.assertIsInstance(outcome.exc, DummyException)
        else:
            self.assertIsNone(outcome.exc)
            self.assertEqual(outcome.result, "done")
def test_ignore_exception_raised_in_func_and_get_results(self):
    """With ignore_exceptions=True, per-task exceptions are captured, successes return 'done'."""
    parallel_object = ParallelObject(self.rand_timeouts, timeout=30,
                                     num_workers=len(self.rand_timeouts))
    results = parallel_object.run(dummy_func_raising_exception, ignore_exceptions=True)
    for res_obj in results:
        if res_obj.exc:
            self.assertIsNone(res_obj.result)
            self.assertIsInstance(res_obj.exc, DummyException)
        else:
            self.assertIsNone(res_obj.exc)
            # Bug fix: assertListEqual requires both operands to be lists and
            # always errors on the string "done"; assertEqual is the correct
            # comparison (matching the sibling test of the same name).
            self.assertEqual(res_obj.result, "done")
def preload_data(self):
    """Populate every DB cluster with prepare-write data, one worker per cluster.

    For each cluster, reads its ``prepare_write_cmd`` list from params, runs
    each command as a stress thread, waits for all of them, then updates the
    cluster's test details. Clusters without a prepare command are skipped
    with a warning. All clusters are processed in parallel via ParallelObject;
    exceptions are NOT ignored, so any cluster failure aborts the preload.
    """
    def _preload_data(scylla_cluster_stats):
        # Per-cluster worker: executed in a ParallelObject thread.
        prepare_write_cmd = scylla_cluster_stats.params.get('prepare_write_cmd')
        db_cluster_name = scylla_cluster_stats.db_cluster.name
        if not prepare_write_cmd:
            # Nothing to preload for this cluster — warn and bail out early.
            self.log.warning(
                "No prepare command defined in YAML for the '%s' cluster", db_cluster_name)
            return
        self.log.info("Running preload command for the '%s' cluster", db_cluster_name)
        scylla_cluster_stats.create_test_stats(
            sub_type='write-prepare', doc_id_with_timestamp=True)
        stress_queue, params = [], {
            'prefix': 'preload-',
        }
        if self.params.get('round_robin'):
            # round_robin mode: a single stress instance cycles across nodes.
            self.log.debug(
                "'%s' DB cluster: Populating data using round_robin", db_cluster_name)
            params.update({'stress_num': 1, 'round_robin': True})
        for stress_cmd in prepare_write_cmd:
            params.update({'stress_cmd': stress_cmd})
            # Run all stress commands
            params.update(dict(stats_aggregate_cmds=False))
            self.log.debug("'%s' DB cluster: RUNNING stress cmd: %s",
                           db_cluster_name, stress_cmd)
            # Threads are collected first so all commands run concurrently.
            stress_queue.append(scylla_cluster_stats.run_stress_thread(**params))
        for stress in stress_queue:
            # Block until each stress thread finishes; results are not stored.
            scylla_cluster_stats.get_stress_results(queue=stress, store_results=False)
        scylla_cluster_stats.update_test_details()

    self.log.info("Running preload operation in parallel on all the DB clusters")
    object_set = ParallelObject(
        timeout=self.load_iteration_timeout_sec,
        objects=[[scs] for scs in self.scylla_clusters_stats],
        num_workers=len(self.scylla_clusters_stats),
    )
    object_set.run(func=_preload_data, unpack_objects=True, ignore_exceptions=False)
def test_unpack_args_for_func(self):
    """With unpack_objects=True, each argument list is splatted into the function."""
    runner = ParallelObject(self.unpacking_args, timeout=self.max_timout + 2,
                            num_workers=2)
    run_results = runner.run(dummy_func_with_several_parameters, unpack_objects=True)
    actual = [res.result for res in run_results]
    expected = [tuple(args) for args in self.unpacking_args]
    self.assertListEqual(actual, expected)
def test_successfull_parallel_run_func_accepted_list_as_parameter(self):
    """With unpack_objects=True, a function accepting a list still receives it whole."""
    runner = ParallelObject(self.list_as_arg, timeout=self.max_timout + 2)
    run_results = runner.run(dummy_func_accepts_list_as_parameter, unpack_objects=True)
    actual = [res.result for res in run_results]
    expected = [item[0][1] for item in self.list_as_arg]
    self.assertListEqual(actual, expected)
def test_parallel_object_exception_raised(self):
    """Without ignore_exceptions, a worker failure surfaces as ParallelObjectException."""
    with self.assertRaises(ParallelObjectException):
        ParallelObject(
            self.rand_timeouts,
            timeout=self.max_timout + 2,
        ).run(dummy_func_raising_exception)