def test_shared_csv(self):
    """One shared CSV source feeds two concurrent VUs: every record must be
    read exactly once and every VU must take part in the reading."""
    concurrency = 2
    script = os.path.join(RESOURCES_DIR, "test_csv_records.py")
    outfile = tempfile.NamedTemporaryFile()
    report = outfile.name + "-%s.csv"
    outfile.close()
    params = Params()
    params.concurrency = concurrency
    params.iterations = 6
    params.report = report
    params.tests = [script]
    params.worker_count = 1
    sup = Supervisor(params)
    sup.start()
    sup.join()
    content = []
    # skip the header line of each per-worker report
    for i in range(params.worker_count):
        with open(report % i) as f:
            content.extend(f.readlines()[1:])
    # column 3 carries the "<vu>:<record>" payload
    content = [item.split(",")[3] for item in content]
    # fix: renamed the file handle so it no longer shadows the stdlib `csv` module
    with open(os.path.join(RESOURCES_DIR, "data/source2.csv")) as csv_file:
        target_data = csv_file.readlines()
    target_data = [line.strip() for line in target_data]
    target_vus = [str(vu) for vu in range(concurrency)]
    real_vus = [record.split(':')[0] for record in content]
    self.assertEqual(set(target_vus), set(real_vus))  # all VUs participated
    real_data = [record.split(':')[1] for record in content]
    self.assertEqual(set(target_data), set(real_data))  # all data has been read
    self.assertEqual(len(target_data), len(real_data))
def test_apiritif_no_loop_multiple_records(self):
    """With more VUs than CSV records and no looping, the run must stop
    after the records are exhausted (one sample per record)."""
    script = os.path.join(RESOURCES_DIR, "test_csv_records.py")
    outfile = tempfile.NamedTemporaryFile()
    report = outfile.name + "-%s.csv"
    outfile.close()
    params = Params()
    params.concurrency = 5  # more than records in csv
    params.iterations = 10
    params.report = report
    params.tests = [script]
    params.worker_count = 1
    sup = Supervisor(params)
    sup.start()
    sup.join()
    content = []
    # skip the header line of each per-worker report
    for i in range(params.worker_count):
        with open(report % i) as f:
            content.extend(f.readlines()[1:])
    content = [item.split(",")[6] for item in content]
    # fix: renamed the file handle so it no longer shadows the stdlib `csv` module
    with open(os.path.join(RESOURCES_DIR, "data/source2.csv")) as csv_file:
        self.assertEqual(len(content), len(csv_file.readlines()))  # equals record number in csv
    for line in content:
        self.assertTrue("true" in line)
def test_two_readers(self):
    """ check different reading speed, fieldnames and separators """
    script = os.path.join(RESOURCES_DIR, "test_two_readers.py")
    outfile = tempfile.NamedTemporaryFile()
    report = outfile.name + "-%s.csv"
    outfile.close()
    params = Params()
    params.concurrency = 2
    params.iterations = 3
    params.report = report
    params.tests = [script]
    params.worker_count = 1
    sup = Supervisor(params)
    sup.start()
    sup.join()
    content = []
    # skip the header, then take every other line (the sample rows)
    for i in range(params.worker_count):
        with open(report % i) as f:
            content.extend(f.readlines()[1::2])
    threads = {"0": [], "1": []}
    # drop everything up to and including the first double quote
    content = [item[item.index('"') + 1:].strip() for item in content]
    # group samples by thread id (first character of the payload)
    for item in content:
        threads[item[0]].append(item[2:])
    target = {  # reader1 runs two times faster
        "0": ["0. u,ser0:000:ze:00", "1. u,ser0:000:tu:22",
              "0. user2:2:fo:44", "1. user2:2:si:66",
              "0. user4:4:ze:00", "1. user4:4:tu:22"],
        "1": ["0. user1:1:on:11", "1. user1:1:th:33",
              "0. user3:3:fi:55", "1. user3:3:se:77",
              "0. user5:5:on:11", "1. user5:5:th:33"]}
    self.assertEqual(threads, target)
def test_ramp_up1(self):
    """Ramp-up with explicit steps split over two workers: each worker must
    produce one thread-delay entry per configured VU."""
    outfile = tempfile.NamedTemporaryFile()
    print(outfile.name)
    params1 = Params()
    params1.concurrency = 50
    params1.report = outfile.name
    params1.tests = dummy_tests
    params1.ramp_up = 60
    params1.steps = 5
    params1.worker_count = 2
    params1.worker_index = 0
    worker1 = Worker(params1)
    res1 = [x.delay for x in worker1._get_thread_params()]
    print(res1)
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual(params1.concurrency, len(res1))
    params2 = copy.deepcopy(params1)
    params2.worker_index = 1
    worker2 = Worker(params2)
    res2 = [x.delay for x in worker2._get_thread_params()]
    print(res2)
    self.assertEqual(params2.concurrency, len(res2))
    print(sorted(res1 + res2))
def test_apiritif_without_loop(self):
    """A non-looping CSV reader must yield exactly 18 samples for one VU."""
    script = os.path.dirname(
        os.path.realpath(__file__)) + "/resources/test_reader_no_loop.py"
    tmp = tempfile.NamedTemporaryFile()
    report_tpl = tmp.name + "-%s.csv"
    tmp.close()
    print(report_tpl)
    opts = Params()
    opts.concurrency = 1
    opts.iterations = 10
    opts.report = report_tpl
    opts.tests = [script]
    opts.worker_count = 1
    supervisor = Supervisor(opts)
    supervisor.start()
    supervisor.join()
    samples = []
    # skip the header, keep every other line (the sample rows)
    for idx in range(opts.worker_count):
        with open(report_tpl % idx) as report_file:
            samples.extend(report_file.readlines()[1::2])
    per_thread = {"0": []}
    stripped = [line[line.index('"') + 1:].strip() for line in samples]
    for line in stripped:
        per_thread[line[0]].append(line[2:])
    self.assertEqual(18, len(per_thread["0"]))
def test_empty_test_file(self):
    """Starting a Worker on a script without tests must raise RuntimeError."""
    report_file = tempfile.NamedTemporaryFile()
    opts = Params()
    opts.concurrency = 1
    opts.iterations = 1
    opts.report = report_file.name
    opts.tests = [os.path.join(RESOURCES_DIR, "test_invalid.py")]
    runner = Worker(opts)
    self.assertRaises(RuntimeError, runner.start)
def test_thread(self):
    """Smoke-run run_nose directly with two concurrent threads."""
    report_file = tempfile.NamedTemporaryFile()
    opts = Params()
    opts.concurrency = 2
    opts.iterations = 10
    opts.report = report_file.name
    opts.tests = dummy_tests
    Worker(opts).run_nose(opts)
def test_empty_worker(self):
    """A Worker with an empty test list must fail on start()."""
    report_file = tempfile.NamedTemporaryFile()
    opts = Params()
    opts.concurrency = 2
    opts.iterations = 10
    opts.report = report_file.name
    opts.tests = []
    runner = Worker(opts)
    self.assertRaises(RuntimeError, runner.start)
def test_worker(self):
    """A Worker running dummy tests should start and finish cleanly."""
    report_file = tempfile.NamedTemporaryFile()
    opts = Params()
    opts.concurrency = 2
    opts.iterations = 10
    opts.report = report_file.name
    opts.tests = dummy_tests
    runner = Worker(opts)
    runner.start()
    runner.join()
def test_supervisor(self):
    """A Supervisor over dummy tests must run to completion."""
    report_file = tempfile.NamedTemporaryFile()
    opts = Params()
    opts.tests = dummy_tests
    opts.report = report_file.name + "%s"
    opts.concurrency = 9
    opts.iterations = 5
    supervisor = Supervisor(opts)
    supervisor.start()
    # poll until the supervisor thread terminates
    while supervisor.is_alive():
        time.sleep(1)
def test_loadgen(self):
    """After run_nose, the registered action-handler plugin must have been
    both started and ended."""
    opts = Params()
    opts.iterations = 1
    opts.concurrency = 1
    opts.report = 'log.ldjson'
    opts.tests = dummy_tests
    Worker(opts).run_nose(opts)
    handlers = thread.get_from_thread_store('action_handlers')
    first_plugin = handlers.pop(0)
    self.assertTrue(first_plugin.started)
    self.assertTrue(first_plugin.ended)
def test_writers_x3(self):
    """Writers must:
    1. be the same for threads of one process
    2. be set up only once
    3. be different for different processes
    """
    def dummy_worker_init(self, params):
        """Replacement Worker.__init__ that installs a spying DummyWriter.

        :type params: Params
        """
        super(Worker, self).__init__(params.concurrency)
        self.params = params
        store.writer = DummyWriter(self.params.report, self.params.workers_log)

    outfile = tempfile.NamedTemporaryFile()
    outfile.close()
    params = Params()
    # use this log to spy on writers
    workers_log = outfile.name + '-workers.log'
    params.workers_log = workers_log
    params.tests = [
        os.path.join(os.path.dirname(__file__), "resources",
                     "test_smart_transactions.py")
    ]
    params.report = outfile.name + "%s"
    # it causes 2 processes and 3 threads (totally)
    params.concurrency = 3
    params.worker_count = 2
    params.iterations = 2
    saved_worker_init = Worker.__init__
    Worker.__init__ = dummy_worker_init
    try:
        sup = Supervisor(params)
        sup.start()
        # fix: Thread.isAlive() was removed in Python 3.9; use is_alive()
        while sup.is_alive():
            time.sleep(1)
        with open(workers_log) as log:
            writers = log.readlines()
        # one writer per process, and they must differ between processes
        self.assertEqual(2, len(writers))
        self.assertNotEqual(writers[0], writers[1])
    finally:
        Worker.__init__ = saved_worker_init
        os.remove(workers_log)
        for i in range(params.worker_count):
            os.remove(params.report % i)
def test_empty_supervisor(self):
    """A Supervisor with no tests should still close its worker pool."""
    report_file = tempfile.NamedTemporaryFile()
    opts = Params()
    opts.tests = []
    opts.report = report_file.name + "%s"
    opts.concurrency = 9
    opts.iterations = 5
    supervisor = Supervisor(opts)
    supervisor.start()
    while supervisor.is_alive():
        time.sleep(1)
    # the multiprocessing pool must have reached the CLOSE state
    self.assertEqual(CLOSE, supervisor.workers._state)
def test_setup_errors(self):
    """A failing setUp in the test script must surface from run_nose."""
    failing_scripts = [os.path.join(RESOURCES_DIR, "test_setup_errors.py")]
    report_file = tempfile.NamedTemporaryFile()
    opts = Params()
    opts.concurrency = 1
    opts.iterations = 1
    opts.report = report_file.name
    opts.tests = failing_scripts
    opts.verbose = True
    runner = Worker(opts)
    self.assertRaises(BaseException, runner.run_nose, opts)
def test_unicode_ldjson(self):
    """Run dummy tests and dump the resulting ldjson report to stdout."""
    report_file = tempfile.NamedTemporaryFile(suffix=".ldjson")
    print(report_file.name)
    opts = Params()
    opts.concurrency = 2
    opts.iterations = 1
    opts.report = report_file.name
    opts.tests = dummy_tests
    runner = Worker(opts)
    runner.start()
    runner.join()
    with open(report_file.name) as fds:
        print(fds.read())
def test_ramp_up2(self):
    """Ramp-up without explicit steps: one delay entry per configured VU."""
    outfile = tempfile.NamedTemporaryFile()
    params1 = Params()
    params1.concurrency = 50
    params1.report = outfile.name
    params1.tests = dummy_tests
    params1.ramp_up = 60
    params1.worker_count = 1
    params1.worker_index = 0
    worker1 = Worker(params1)
    res1 = [x.delay for x in worker1._get_thread_params()]
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual(params1.concurrency, len(res1))
def test_unicode_ldjson(self):
    """Two VUs x one iteration must produce exactly four ldjson lines."""
    report_file = tempfile.NamedTemporaryFile(suffix=".ldjson")
    opts = Params()
    opts.concurrency = 2
    opts.iterations = 1
    opts.report = report_file.name
    opts.tests = dummy_tests
    runner = Worker(opts)
    runner.start()
    runner.join()
    with open(report_file.name) as fds:
        lines = fds.readlines()
    self.assertEqual(4, len(lines))
def test_empty_worker(self):
    """Even when start() fails on an empty test list, Worker must still
    call close()."""
    outfile = tempfile.NamedTemporaryFile()
    print(outfile.name)
    params = Params()
    params.concurrency = 2
    params.iterations = 10
    params.report = outfile.name
    params.tests = []
    worker = Worker(params)
    # wrap close() to record whether it has been called
    worker.close = self.get_required_method(worker.close)
    try:
        worker.start()
    except BaseException:  # fix: explicit over bare except; assertRaises doesn't catch it
        pass
    self.assertTrue(self.required_method_called)
def test_threads_and_processes(self):
    """ check if threads and processes can divide csv fairly """
    script = os.path.dirname(
        os.path.realpath(__file__)) + "/resources/test_thread_reader.py"
    outfile = tempfile.NamedTemporaryFile()
    report = outfile.name + "-%s.csv"
    outfile.close()
    print(report)
    params = Params()
    params.concurrency = 4
    params.iterations = 2
    params.report = report
    params.tests = [script]
    params.worker_count = 2
    sup = Supervisor(params)
    sup.start()
    sup.join()
    content = []
    # skip the header of each worker report, keep every other line (samples)
    for i in range(params.worker_count):
        with open(report % i) as f:
            content.extend(f.readlines()[1::2])
    threads = {"0": [], "1": [], "2": [], "3": []}
    # drop everything up to and including the first double quote
    content = [item[item.index('"') + 1:].strip() for item in content]
    for item in content:
        self.assertEqual(item[0], item[2])  # thread equals target
        self.assertEqual("a", item[-1])  # age is the same
        # item[6] appears to be an iteration flag: '+' marks modified names
        # on later iterations — NOTE(review): inferred from the asserts below
        if item[6] == "0":
            self.assertEqual(-1, item.find('+'))
        else:
            self.assertNotEqual(-1, item.find('+'))  # name value is modified
        threads[item[0]].append(item[9:-2])
    # format: <user>:<pass>, quoting ignored
    target = {
        '0': ['""u:ser0""', '""u+:ser0""', 'user4:4', 'user4+:4'],
        '1': ['""user1"":1', '""user1""+:1', 'user5:5', 'user5+:5'],
        '2': ['user2:""2""', 'user2+:""2""', '""u:ser0""', '""u+:ser0""'],
        '3': ['user3:3', 'user3+:3', '""user1"":1', '""user1""+:1']
    }
    self.assertEqual(threads, target)
def test_setup_errors(self):
    """run_nose must raise RuntimeError when the script's setup fails."""
    failing_scripts = [
        os.path.join(os.path.dirname(__file__), "resources",
                     "test_setup_errors.py")
    ]
    report_file = tempfile.NamedTemporaryFile()
    print(report_file.name)
    opts = Params()
    opts.concurrency = 1
    opts.iterations = 1
    opts.report = report_file.name
    opts.tests = failing_scripts
    opts.verbose = True
    runner = Worker(opts)
    self.assertRaises(RuntimeError, runner.run_nose, opts)
    # reading the report verifies it exists and is readable after the failure
    with open(report_file.name, 'rt') as _file:
        _file.read()
def test_thread_proc(self):
    """Smoke test: 2 worker processes x 3 VUs must run and write into
    apiritif's log file."""
    log = "/tmp/apiritif.log"
    if os.path.exists(log):
        os.remove(log)
    script = os.path.dirname(
        os.path.realpath(__file__)) + "/resources/test_requests.py"
    outfile = tempfile.NamedTemporaryFile()
    report = outfile.name + "%s.csv"
    outfile.close()
    print(report)
    params = Params()
    params.concurrency = 3
    params.iterations = 2
    params.report = report
    params.tests = [script]
    params.worker_count = 2
    sup = Supervisor(params)
    sup.start()
    sup.join()
    with open(log) as f:
        content = f.readlines()
    # fix: removed dead debug statement (`a = 1 + 1`); the log read was
    # never checked — assert the run actually produced log output
    self.assertTrue(content)
def test_setup_teardown_graceful(self):
    """setup/main/teardown steps must execute in their declared order."""
    scripts = [os.path.join(RESOURCES_DIR, "setup_teardown_graceful.py")]
    report_file = tempfile.NamedTemporaryFile()
    opts = Params()
    opts.concurrency = 1
    opts.iterations = 1
    opts.report = report_file.name
    opts.tests = scripts
    opts.verbose = True
    Worker(opts).run_nose(opts)
    # todo: fix result of "samples = self.apiritif_extractor.parse_recording(recording, sample)"
    test_result = apiritif.get_from_thread_store('test_result')
    expected = [
        '1. setup1', '2. setup2', '3. main1', '4. main2',
        '5. teardown1', '6. teardown2'
    ]
    self.assertEqual(expected, test_result)
def test_reader_without_loop_non_stop(self):
    """With the error handler neutered, a non-looping reader does not stop
    iterations, so strictly more than the usual 18 samples are produced."""
    script = os.path.dirname(
        os.path.realpath(__file__)) + "/resources/test_reader_no_loop.py"
    outfile = tempfile.NamedTemporaryFile()
    report = outfile.name + "-%s.csv"
    outfile.close()
    print(report)
    params = Params()
    params.concurrency = 1
    params.iterations = 10
    params.report = report
    params.tests = [script]
    params.worker_count = 1
    handler = ApiritifPlugin.handleError
    try:
        # wrong handler: doesn't stop iterations
        ApiritifPlugin.handleError = lambda a, b, c: False
        sup = Supervisor(params)
        sup.start()
        sup.join()
    finally:
        # always restore the real handler for subsequent tests
        ApiritifPlugin.handleError = handler
    content = []
    # skip the header, keep every other line (the sample rows)
    for i in range(params.worker_count):
        with open(report % i) as f:
            content.extend(f.readlines()[1::2])
    threads = {"0": []}
    content = [item[item.index('"') + 1:].strip() for item in content]
    for item in content:
        threads[item[0]].append(item[2:])
    self.assertTrue(len(threads["0"]) > 18)
def test_handlers(self):
    """Handlers must:
    1. be unique for thread
    2. be set up every launch of test suite
    """
    def log_line(line):
        # append one spy record to the shared handlers log
        with open(thread.handlers_log, 'a') as log:
            log.write("%s\n" % line)

    def mock_get_handlers():
        transaction_handlers = thread.get_from_thread_store(
            'transaction_handlers')
        if not transaction_handlers:
            transaction_handlers = {'enter': [], 'exit': []}
        length = "%s/%s" % (len(transaction_handlers['enter']),
                            len(transaction_handlers['exit']))
        log_line("get: {pid: %s, idx: %s, iteration: %s, len: %s}" %
                 (os.getpid(), thread.get_index(), thread.get_iteration(),
                  length))
        return transaction_handlers

    def mock_set_handlers(handlers):
        log_line("set: {pid: %s, idx: %s, iteration: %s, handlers: %s}," %
                 (os.getpid(), thread.get_index(), thread.get_iteration(),
                  handlers))
        thread.put_into_thread_store(transaction_handlers=handlers)

    outfile = tempfile.NamedTemporaryFile()
    outfile.close()
    params = Params()
    # use this log to spy on writers
    handlers_log = outfile.name + '-handlers.log'
    thread.handlers_log = handlers_log
    params.tests = [
        os.path.join(os.path.dirname(__file__), "resources",
                     "test_smart_transactions.py")
    ]
    params.report = outfile.name + "%s"
    # it causes 2 processes and 3 threads (totally)
    params.concurrency = 3
    params.worker_count = 2
    params.iterations = 2
    saved_get_handlers = apiritif.get_transaction_handlers
    saved_set_handlers = apiritif.set_transaction_handlers
    apiritif.get_transaction_handlers = mock_get_handlers
    apiritif.set_transaction_handlers = mock_set_handlers
    try:
        sup = Supervisor(params)
        sup.start()
        # fix: Thread.isAlive() was removed in Python 3.9; use is_alive()
        while sup.is_alive():
            time.sleep(1)
        with open(handlers_log) as log:
            handlers = log.readlines()
        self.assertEqual(36, len(handlers))
        self.assertEqual(
            6,
            len([handler for handler in handlers
                 if handler.startswith('set')]))
        # NOTE(review): readlines() keeps the trailing '\n', so this
        # endswith('2/2}') check can never match a full line — confirm intent
        self.assertEqual(
            0,
            len([handler for handler in handlers
                 if handler.endswith('2/2}')]))
    finally:
        apiritif.get_transaction_handlers = saved_get_handlers
        apiritif.set_transaction_handlers = saved_set_handlers
        os.remove(handlers_log)
        for i in range(params.worker_count):
            os.remove(params.report % i)