def test_prompt_renders_all_questions(self):
    question1 = dbx.Stub()
    question1.name = 'foo'
    result1 = object()
    question2 = dbx.Stub()
    question2.name = 'bar'
    result2 = object()

    with dbx.Spy() as render:
        render.reset()
        render.render(question1, {}).returns(result1)
        render.render(question2, {'foo': result1}).returns(result2)

    result = prompt([question1, question2], render=render)

    self.assertEqual({'foo': result1, 'bar': result2}, result)
    dbx.assert_that(
        render.render, dbx.called().with_args(question1, dbx.ANY_ARG))
    dbx.assert_that(
        render.render, dbx.called().with_args(question2, dbx.ANY_ARG))

def test_load_2x2_operands_in_2x2_processors(self):
    nprocs = 4

    # given
    A = M2(1, 2, 3, 4)
    B = M2(5, 6, 7, 8)
    procs = [Spy(Cannon.Processor) for i in range(nprocs)]
    loader = OperationsI(procs)

    # when
    loader.load_processors(A, B)

    # then
    A_blocks = [M1(1), M1(2), M1(4), M1(3)]
    B_blocks = [M1(5), M1(8), M1(7), M1(6)]
    for i in range(nprocs):
        assert_that(procs[i].injectFirst, called().with_args(A_blocks[i], 0))
        assert_that(procs[i].injectSecond, called().with_args(B_blocks[i], 0))

def test_load_2x2_operands_in_2x2_processors(self):
    nprocs = 4

    # given
    A = M2(1, 2, 3, 4)
    B = M2(5, 6, 7, 8)
    procs = [Spy(Cannon.Processor) for i in range(nprocs)]
    frontend = FrontendI(procs)

    # when
    frontend.load_processors(A, B)

    # then
    A_blocks = [M1(1), M1(2), M1(4), M1(3)]
    B_blocks = [M1(5), M1(8), M1(7), M1(6)]
    for i in range(nprocs):
        assert_that(procs[i].injectA, called().with_args(A_blocks[i], 0))
        assert_that(procs[i].injectB, called().with_args(B_blocks[i], 0))

def test_load_4x4_operands_in_2x2_processors(self):
    nprocs = 4

    # given
    A = M4(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)
    B = M4(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)
    procs = [Spy(Cannon.Processor) for i in range(nprocs)]
    frontend = FrontendI(procs)

    # when
    frontend.load_processors(A, B)

    # then
    A_blocks = [
        M2(1, 2, 5, 6),
        M2(3, 4, 7, 8),
        M2(11, 12, 15, 16),
        M2(9, 10, 13, 14)
    ]
    B_blocks = [
        M2(17, 18, 21, 22),
        M2(27, 28, 31, 32),
        M2(25, 26, 29, 30),
        M2(19, 20, 23, 24)
    ]

    for i in range(nprocs):
        assert_that(procs[i].injectA, called().with_args(A_blocks[i], 0))
        assert_that(procs[i].injectB, called().with_args(B_blocks[i], 0))

def test_toplevel_load_dataset(dw_instances, profile):
    datadotworld.load_dataset('agentid/datasetid', profile=profile)
    assert_that(
        dw_instances[profile].load_dataset,
        called().times(1).with_args(equal_to('agentid/datasetid'),
                                    force_update=equal_to(False)))
    assert_that(
        dw_instances[profile].load_dataset,
        called().times(1).with_args(equal_to('agentid/datasetid'),
                                    auto_update=equal_to(False)))

def test_prepare_headers_works(self):
    request = self.get_request()
    self.c.prepare_headers(request, 200, {'content-type': 'x-test'})
    assert_that(request.setResponseCode, called().with_args(200).times(1))
    assert_that(
        request.setHeader,
        called().with_args('content-type', 'x-test').times(1)
    )

def test_processor_init_3x3_operands_in_3x3_processors(self):
    # given
    nprocs = 9
    procs = [Spy(Cannon.Processor) for i in range(nprocs)]
    frontend = FrontendI(procs)

    # when
    frontend.init_processors()

    # then
    assert_that(procs[0].init,
                called().with_args(0, 3, procs[6], procs[2], anything()))
    assert_that(procs[1].init,
                called().with_args(1, 3, procs[7], procs[0], anything()))
    assert_that(procs[2].init,
                called().with_args(2, 3, procs[8], procs[1], anything()))
    assert_that(procs[3].init,
                called().with_args(3, 3, procs[0], procs[5], anything()))
    assert_that(procs[4].init,
                called().with_args(4, 3, procs[1], procs[3], anything()))
    assert_that(procs[5].init,
                called().with_args(5, 3, procs[2], procs[4], anything()))
    assert_that(procs[6].init,
                called().with_args(6, 3, procs[3], procs[8], anything()))
    assert_that(procs[7].init,
                called().with_args(7, 3, procs[4], procs[6], anything()))
    assert_that(procs[8].init,
                called().with_args(8, 3, procs[5], procs[7], anything()))

def test_spy(self):
    playername = "Kawhi Leonard"
    year = 2017
    salary = "20m"

    # Create a free spy using the `with` keyword and Spy(),
    # defining the same method as salaryService
    with Spy() as free_ss_spy:
        free_ss_spy.set_salary(salary).returns("20m")

    # Call the spy's method through the SUT
    pls.playerService(playername, 2017, ds.dataService(playername),
                      pos.profileService(playername), bs.bodyService(),
                      free_ss_spy).set_new_salary(salary)
    # Verify that the spy's set_salary method was called
    assert_that(free_ss_spy.set_salary, called())

    # Create a spy from a class: Spy(cls)
    spy_ss = Spy(ss.salaryService)
    # Call the spy's method through the SUT
    pls.playerService(playername, 2017, ds.dataService(playername),
                      pos.profileService(playername), bs.bodyService(),
                      spy_ss).set_new_salary(salary)
    # Verify that spy_ss.set_salary was called
    assert_that(spy_ss.set_salary, called())

    # Spy extends Stub, so besides recording calls it can also define return values
    with Spy(bs.bodyService) as spy_bs_as_stub:
        spy_bs_as_stub.get_height().returns("188cm")
        spy_bs_as_stub.get_weight().returns("110kg")
        spy_bs_as_stub.illnessHistory(2017).returns("Year 2017 no injury")
        spy_bs_as_stub.illnessHistory(2018).returns("Year 2017 has ankle injury")

    # Call the spy's methods directly
    spy_bs_as_stub.get_height()
    spy_bs_as_stub.get_weight()
    spy_bs_as_stub.illnessHistory(2017)
    spy_bs_as_stub.illnessHistory(2018)

    # Verify that the spy's methods were called, and with which arguments
    assert_that(spy_bs_as_stub.get_height, called())
    assert_that(spy_bs_as_stub.get_weight, called())
    assert_that(spy_bs_as_stub.illnessHistory, called().times(2))
    # Use anything() to match any argument
    assert_that(spy_bs_as_stub.illnessHistory, called().with_args(anything()))

    # Call the spy's methods through the SUT
    player_service_spy_2016 = pls.playerService(
        playername, 2017, ds.dataService(playername),
        pos.profileService(playername), spy_bs_as_stub, ss.salaryService())
    player_service_spy_2016.get_physical_feature(2017)
    # Verify the spy's methods were called again via the SUT:
    # called() checks the call, times() checks the call count
    assert_that(spy_bs_as_stub.get_height, called().times(2))
    assert_that(spy_bs_as_stub.get_weight, called().times(2))
    assert_that(spy_bs_as_stub.illnessHistory, called().times(3))

    # Pass an instance to ProxySpy()
    spy_pos = ProxySpy(pos.profileService(playername))
    # Call the spy's method through the SUT
    pls.playerService(playername, 2016, ds.dataService(playername), spy_pos,
                      bs.bodyService(), ss.salaryService()).get_player_info()
    # Verify that the spy's method was called
    assert_that(spy_pos.get_player_team, called())

def test_account_creation__3_accounts(self):
    with Stub(PasswordService) as password_service:
        password_service.generate().returns('some')

    store = Spy(AccountStore)
    service = AccountService(store, password_service)
    service.create_group('team', ['John', 'Peter', 'Alice'])

    assert_that(store.save, called().times(3))
    assert_that(store.save, called().times(greater_than(2)))

def test_stub_delegates_list(self):
    with Stub(PasswordService) as password_service:
        password_service.generate().delegates(["12345", "mypass", "nothing"])

    store = Spy(AccountStore)
    service = AccountService(store, password_service)
    service.create_group('team', ['John', 'Peter', 'Alice'])

    assert_that(store.save, called().with_args('John', '12345'))
    assert_that(store.save, called().with_args('Peter', 'mypass'))
    assert_that(store.save, called().with_args('Alice', 'nothing'))

def test_2x2_processors_2x2_operands(self):
    '''
    initial shift:

    1 2     1 2        5 6     5 8
    3 4  <  4 3        7 8     7 6
                                 ^

    processors and collectors are distributed objects
    '''
    P = [self.broker.add_servant(ProcessorI(), Cannon.ProcessorPrx)
         for i in range(4)]
    collector_servant = Mimic(Spy, Cannon.Collector)
    collector = self.broker.add_servant(collector_servant, Cannon.CollectorPrx)

    # by-hand shifted submatrices
    A0 = M1(1)
    A1 = M1(2)
    A2 = M1(4)
    A3 = M1(3)

    B0 = M1(5)
    B1 = M1(8)
    B2 = M1(7)
    B3 = M1(6)

    # by-hand processor initialization
    P[0].init(0, 2, P[2], P[1], collector)
    P[1].init(1, 2, P[3], P[0], collector)
    P[2].init(2, 2, P[0], P[3], collector)
    P[3].init(3, 2, P[1], P[2], collector)

    # by-hand processor loading
    P[0].injectA(A0, 0); P[0].injectB(B0, 0)
    P[1].injectA(A1, 0); P[1].injectB(B1, 0)
    P[2].injectA(A2, 0); P[2].injectB(B2, 0)
    P[3].injectA(A3, 0); P[3].injectB(B3, 0)

    wait_that(collector_servant.inject, called().times(4))

    # expected result blocks
    C0 = M1(19)
    C1 = M1(22)
    C2 = M1(43)
    C3 = M1(50)

    assert_that(collector_servant.inject, called().with_args(0, C0, anything()))
    assert_that(collector_servant.inject, called().with_args(1, C1, anything()))
    assert_that(collector_servant.inject, called().with_args(2, C2, anything()))
    assert_that(collector_servant.inject, called().with_args(3, C3, anything()))

def test_ensure_connect_calls_execute_and_commit(self):
    getUtility_spy, store = self._prepare_store_spy()
    from mamba.enterprise import database
    _getUtility = database.getUtility
    database.getUtility = getUtility_spy

    self.database.store(ensure_connect=True)

    assert_that(store.execute, called().with_args('SELECT 1').times(1))
    assert_that(store.commit, called().times(1))

    database.getUtility = _getUtility

def test_asyncjson(self):
    consumer = Spy(Request)
    ajson = ProxySpy(asyncjson.AsyncJSON({'id': 1, 'name': 'Test'}))

    r = yield ajson.begin(consumer)
    for p in r:
        pass

    assert_that(consumer.registerProducer, called().times(1))
    assert_that(consumer.write, called())
    assert_that(consumer.unregisterProducer, called().times(1))
    assert_that(ajson.begin, called())

    self.flushLoggedErrors()

def test_stub_delegates_list(self):
    with Stub(PasswordService) as password_service:
        password_service.generate().delegates(
            ["12345", "mypass", "nothing"])

    store = Spy(AccountStore)
    service = AccountService(store, password_service)
    service.create_group('team', ['John', 'Peter', 'Alice'])

    assert_that(store.save, called().with_args('John', '12345'))
    assert_that(store.save, called().with_args('Peter', 'mypass'))
    assert_that(store.save, called().with_args('Alice', 'nothing'))

def test_processor_init_2x2_operands_in_2x2_processors(self):
    nprocs = 4
    procs = [Spy(Cannon.Processor) for i in range(nprocs)]
    loader = OperationsI(procs)

    # when
    loader.init_processors()

    # then
    assert_that(procs[0].init,
                called().with_args(0, 0, procs[2], procs[1], 2, anything()))
    assert_that(procs[1].init,
                called().with_args(0, 1, procs[3], procs[0], 2, anything()))
    assert_that(procs[2].init,
                called().with_args(1, 0, procs[0], procs[3], 2, anything()))
    assert_that(procs[3].init,
                called().with_args(1, 1, procs[1], procs[2], 2, anything()))

def test_account_creation__argument_values(self):
    with Stub(PasswordService) as password_service:
        password_service.generate().returns('some')

    store = Spy(AccountStore)
    service = AccountService(store, password_service)
    service.create_user('John')

    assert_that(store.save, called().with_args('John', 'some'))
    assert_that(store.save, called().with_args('John', ANY_ARG))
    assert_that(store.save, never(called().with_args('Alice', anything())))
    assert_that(store.save,
                called().with_args(contains_string('oh'), ANY_ARG))

def test_2x2_processors_2x2_operands(self):
    '''
    initial shift:

    1 2     1 2        5 6     5 8
    3 4  <  4 3        7 8     7 6
                                 ^

    processors and collector are distributed objects
    '''
    P = [self.broker.add_servant(ProcessorI(), Cannon.ProcessorPrx)
         for i in range(4)]
    collector_servant = Mimic(Spy, Cannon.Collector)
    collector = self.broker.add_servant(collector_servant, Cannon.CollectorPrx)

    # by-hand shifted submatrices
    A00 = M1(1)
    A01 = M1(2)
    A10 = M1(4)
    A11 = M1(3)

    B00 = M1(5)
    B01 = M1(8)
    B10 = M1(7)
    B11 = M1(6)

    # by-hand processor initialization
    P[0].init(0, 0, P[2], P[1], 2, collector)
    P[1].init(0, 1, P[3], P[0], 2, collector)
    P[2].init(1, 0, P[0], P[3], 2, collector)
    P[3].init(1, 1, P[1], P[2], 2, collector)

    # by-hand processor loading
    P[0].injectFirst(A00, 0); P[0].injectSecond(B00, 0)
    P[1].injectFirst(A01, 0); P[1].injectSecond(B01, 0)
    P[2].injectFirst(A10, 0); P[2].injectSecond(B10, 0)
    P[3].injectFirst(A11, 0); P[3].injectSecond(B11, 0)

    wait_that(collector_servant.injectSubmatrix, called().times(4))

    # expected result blocks
    C00 = M1(19)
    C01 = M1(22)
    C10 = M1(43)
    C11 = M1(50)

    assert_that(collector_servant.injectSubmatrix,
                called().with_args(C00, 0, 0, anything()))
    assert_that(collector_servant.injectSubmatrix,
                called().with_args(C01, 0, 1, anything()))
    assert_that(collector_servant.injectSubmatrix,
                called().with_args(C10, 1, 0, anything()))
    assert_that(collector_servant.injectSubmatrix,
                called().with_args(C11, 1, 1, anything()))

def test_processor_init_2x2_operands_in_2x2_processors(self):
    # given
    nprocs = 4
    procs = [Spy(Cannon.Processor) for i in range(nprocs)]
    frontend = FrontendI(procs)

    # when
    frontend.init_processors()

    # then
    assert_that(procs[0].init,
                called().with_args(0, 2, procs[2], procs[1], anything()))
    assert_that(procs[1].init,
                called().with_args(1, 2, procs[3], procs[0], anything()))
    assert_that(procs[2].init,
                called().with_args(2, 2, procs[0], procs[3], anything()))
    assert_that(procs[3].init,
                called().with_args(3, 2, procs[1], procs[2], anything()))

def test_ensure_connect_calls_rollback_on_disconnectionerror(self):
    getUtility_spy, store = self._prepare_store_spy(True)
    from mamba.enterprise import database
    _getUtility = database.getUtility
    database.getUtility = getUtility_spy

    self.database.store(ensure_connect=True)

    assert_that(store.execute, called().with_args("SELECT 1").times(1))
    assert_that(store.rollback, called().times(1))
    assert_that(store.commit, called().times(0))

    database.getUtility = _getUtility

def test_load_dataset_existing_forced(self, api_client, dw, dataset_key):
    dataset = dw.load_dataset(dataset_key, force_update=True)
    assert_that(
        api_client.download_datapackage,
        called().times(1).with_args(equal_to(dataset_key), anything()))
    assert_that(dataset.raw_data, has_length(4))

async def test_process_lines_multiple_streams(self, target, api_client,
                                              test_files_path):
    with open(path.join(test_files_path, 'fixerio-multistream.jsonl')) as file:
        async for _ in target.process_lines(file):  # noqa: F841
            pass

    assert_that(api_client.append_stream_chunked, called().times(2))

async def test_process_new_version(self, target, api_client, test_files_path):
    with open(path.join(test_files_path, 'fixerio-new-version.jsonl')) as file:
        async for _ in target.process_lines(file):  # noqa: F841
            pass

    assert_that(api_client.truncate_stream_records, called().times(1))

def test_delete_files(self, api_client, datasets_api, dataset_key):
    files = ['filename.ext']
    api_client.delete_files(dataset_key, files)
    assert_that(datasets_api.delete_files_and_sync_sources,
                called().times(1).with_args(equal_to('agentid'),
                                            equal_to('datasetid'),
                                            equal_to(files)))

def test_is_displayed_asks_the_element_if_it_is_displayed(self):
    matcher = is_displayed()
    selenium_element = Spy()

    matcher.matches(selenium_element)

    assert_that(selenium_element.is_displayed, is_(called()))

def test_upload_files(self, api_client, uploads_api, dataset_key):
    files = ['filename.ext']
    api_client.upload_files(dataset_key, files)
    assert_that(uploads_api.upload_files,
                called().times(1).with_args(equal_to('agentid'),
                                            equal_to('datasetid'),
                                            equal_to(files)))

def test_update_dataset(self, api_client, datasets_api, dataset_key):
    patch_request = {'tags': ['tag1', 'tag2']}
    api_client.update_dataset(dataset_key, **patch_request)
    assert_that(datasets_api.patch_dataset,
                called().times(1).with_args(equal_to('agentid'),
                                            equal_to('datasetid'),
                                            has_properties(patch_request)))

def test_get_insight_for_project(self, api_client, insights_api, project_key):
    api_client.get_insights_for_project(project_key)
    assert_that(
        insights_api.get_insights_for_project,
        called().times(1).with_args(equal_to('agentid'),
                                    equal_to('projectid')))

def test_processors_6x6_block(self):
    # given
    P0 = ProcessorI()
    collector = Spy()
    A = M6( 1,  2,  3,  4,  5,  6,
            7,  8,  9, 10, 11, 12,
           13, 14, 15, 16, 17, 18,
           19, 20, 21, 22, 23, 24,
           25, 26, 27, 28, 29, 30,
           31, 32, 33, 34, 35, 36)
    B = M6(36, 35, 34, 33, 32, 31,
           30, 29, 28, 27, 26, 25,
           24, 23, 22, 21, 20, 19,
           18, 17, 16, 15, 14, 13,
           12, 11, 10,  9,  8,  7,
            6,  5,  4,  3,  2,  1)
    C = M6( 336,  315,  294,  273,  252,  231,
           1092, 1035,  978,  921,  864,  807,
           1848, 1755, 1662, 1569, 1476, 1383,
           2604, 2475, 2346, 2217, 2088, 1959,
           3360, 3195, 3030, 2865, 2700, 2535,
           4116, 3915, 3714, 3513, 3312, 3111)

    # when
    P0.init(0, 1, None, None, collector)
    P0.injectA(A, 0)
    P0.injectB(B, 0)

    # then
    assert_that(collector.inject, called().with_args(0, C, ANY_ARG))

def test_upload_file(self, api_client, uploads_api, dataset_key):
    name = 'filename.ext'
    api_client.upload_file(dataset_key, name)
    assert_that(uploads_api.upload_file,
                called().times(1).with_args(equal_to('agentid'),
                                            equal_to('datasetid'),
                                            equal_to(name)))

def test_publish_a_message_to_a_queue(self):
    backend = a_backend()
    registry = SerializerRegistry(serializer_settings)
    sut = QueuePublisher(backend=backend, serializer_registry=registry)

    sut.publish("queue", {"some_content": "12345"})

    assert_that(backend.publish_to_queue, called().times(1))

def test_delete_insight(self, api_client, insights_api, project_key,
                        insight_id='insightid'):
    api_client.delete_insight(project_key, insight_id)
    assert_that(insights_api.delete_insight,
                called().times(1).with_args(equal_to('agentid'),
                                            equal_to('projectid'),
                                            equal_to(insight_id)))

def test_any_arg_checking_works_when_eq_raises(self):
    with Spy(Collaborator) as spy:
        spy.method_accepting_property(ANY_ARG).returns(6)

    assert_that(spy.method_accepting_property(RaisingEq()), is_(6))
    assert_that(spy.method_accepting_property,
                called().with_args(instance_of(RaisingEq)))

def test_configure():
    runner = CliRunner()
    config = Spy(Config)

    runner.invoke(cli.configure, input='token\n', obj={'config': config})

    assert_that(config, property_set('auth_token').to('token'))
    assert_that(config.save, called())

def test_sparql(self, api_client, dataset_key, sparql_api):
    api_client.sparql(dataset_key, 'query', sparql_api_mock=sparql_api)
    assert_that(
        sparql_api.sparql_post,
        called().times(1).with_args('agentid', 'datasetid', 'query',
                                    sparql_api_mock=sparql_api))

def test_memoized(self, free_spy):
    @memoized(key_mapper=lambda first_arg, _: first_arg)
    def invoke_spy(first_arg, second_arg):
        return free_spy.method(first_arg, second_arg)

    assert_that(invoke_spy('test', 'not_in_key'),
                equal_to(invoke_spy('test', 'should_not_matter')))
    assert_that(free_spy.method, called().times(1))

def test_remove_linked_dataset(self, api_client, projects_api, project_key,
                               dataset_key):
    api_client.remove_linked_dataset(project_key, dataset_key)
    assert_that(projects_api.remove_linked_dataset,
                called().times(1).with_args(equal_to('agentid'),
                                            equal_to('projectid'),
                                            equal_to('agentid'),
                                            equal_to('datasetid')))

def test_replace_insight(self, api_client, insights_api, project_key,
                         insight_id='insightid'):
    replace_request = {'title': 'Replace Insight',
                       'image_url': 'https://image.com/'}
    api_client.replace_insight(project_key, insight_id, **replace_request)
    assert_that(insights_api.replace_insight, called().times(1))

def test_chain_default_behavior(self):
    stub = doublex.Stub()
    doublex.set_default_behavior(stub, doublex.Spy)
    chained_spy = stub.foo()

    chained_spy.bar()

    doublex.assert_that(chained_spy.bar, doublex.called())

def test_uses_chooses_right_validator(self):
    with Spy() as validators:
        validators.get(ANY_ARG).returns(lambda number: None)

    self.number.validator(VALID_NUMBER, VALID_COUNTRY_CODE,
                          validators=validators)

    assert_that(validators.get,
                was(called().with_args(NORMALIZED_COUNTRY_CODE)))

def test_configure():
    runner = CliRunner()
    config = Spy(FileConfig)

    runner.invoke(cli.configure, input='token\n', obj={'config': config})

    assert_that(config, property_set('auth_token').to('token'))
    assert_that(config.save, called())

def test_replace_dataset(self, api_client, datasets_api, dataset_key):
    replace_request = {'visibility': 'OPEN'}
    api_client.replace_dataset(dataset_key, **replace_request)
    assert_that(
        datasets_api.replace_dataset,
        called().times(1).with_args(equal_to('agentid'),
                                    equal_to('datasetid'),
                                    has_properties(replace_request)))

def test_replace_dataset(self, api_client, datasets_api, dataset_key):
    replace_request = {'visibility': 'OPEN'}
    api_client.replace_dataset(dataset_key, **replace_request)
    assert_that(datasets_api.replace_dataset,
                called().times(1).with_args(equal_to('agentid'),
                                            equal_to('datasetid'),
                                            has_properties(replace_request)))

def test_load_4x4_operands_in_2x2_processors(self):
    nprocs = 4

    # given
    A = M4(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)
    B = M4(17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32)
    procs = [Spy(Cannon.Processor) for i in range(nprocs)]
    loader = OperationsI(procs)

    # when
    loader.load_processors(A, B)

    # then
    A_blocks = [
        M2(1, 2, 5, 6),
        M2(3, 4, 7, 8),
        M2(11, 12, 15, 16),
        M2(9, 10, 13, 14)]
    B_blocks = [
        M2(17, 18, 21, 22),
        M2(27, 28, 31, 32),
        M2(25, 26, 29, 30),
        M2(19, 20, 23, 24)]

    for i in range(nprocs):
        assert_that(procs[i].injectFirst, called().with_args(A_blocks[i], 0))
        assert_that(procs[i].injectSecond, called().with_args(B_blocks[i], 0))

def test_processors_rings(self):
    # given
    P0 = ProcessorI()
    P1 = Spy()
    P2 = Spy()
    collector = Stub()
    A0 = M1(1)
    B0 = M1(5)

    # when
    P0.init(1, 1, P2, P1, 2, collector)
    P0.injectFirst(A0, 0)
    P0.injectSecond(B0, 0)

    # then
    assert_that(P1.injectFirst, called().async(1).with_args(A0, 1, ANY_ARG))
    assert_that(P2.injectSecond, called().async(1).with_args(B0, 1, ANY_ARG))

def test_uses_parser_to_parse_input(self):
    with Spy() as spy:
        parse_return = [NORMALIZED_COUNTRY_CODE, VALID_NUMBER]
        spy.parser(ANY_ARG).returns(parse_return)

    self.number.parse = spy.parser
    self.number.validate(VALID_VAT)

    assert_that(spy.parser, was(called().with_args(VALID_VAT)))

def test_collector_called(self):
    # given
    processor = self.broker.add_servant(ProcessorI(), Cannon.ProcessorPrx)
    collector_servant = Mimic(Spy, Cannon.Collector)
    collector = self.broker.add_servant(collector_servant, Cannon.CollectorPrx)

    A = M2(1, 2, 3, 4)
    B = M2(5, 6, 7, 8)

    # when
    processor.init(1, 1, None, None, 1, collector)
    processor.injectFirst(A, 0)
    processor.injectSecond(B, 0)

    # then
    C = M2(19, 22, 43, 50)
    assert_that(collector_servant.injectSubmatrix,
                called().async(1).with_args(C, 1, 1, anything()))

def test_account_creation__restricted_stub(self):
    with Stub(PasswordService) as password_service:
        password_service.generate().returns('some')

    store = Spy(AccountStore)
    service = AccountService(store, password_service)
    service.create_user('John')

    assert_that(store.save, called())