def test_blobchunkworker_run(tmpdir):
    lpath = str(tmpdir.join("test.tmp"))
    with open(lpath, "wt") as f:
        f.write(str(uuid.uuid4()))
    args = MagicMock()
    args.pageblob = True
    args.autovhd = False
    args.timeout = None

    session = requests.Session()
    adapter = requests_mock.Adapter()
    session.mount("mock", adapter)

    exc_list = []
    flock = threading.Lock()
    sa_in_queue = queue.Queue()
    sa_out_queue = queue.Queue()
    with requests_mock.mock() as m:
        m.put("mock://blobepcontainer/blob?saskey", status_code=200)
        sbs = blobxfer.SasBlobService("mock://blobep", "saskey", None)
        bcw = blobxfer.BlobChunkWorker(
            exc_list, sa_in_queue, sa_out_queue, args, sbs, True)
        with pytest.raises(IOError):
            bcw.putblobdata(lpath, "container", "blob", "blockid", 0, 4,
                            flock, None)

    args.pageblob = False
    with requests_mock.mock() as m:
        m.put("mock://blobepcontainer/blob?saskey", status_code=201)
        sbs = blobxfer.SasBlobService("mock://blobep", "saskey", None)
        bcw = blobxfer.BlobChunkWorker(
            exc_list, sa_in_queue, sa_out_queue, args, sbs, True)
        try:
            bcw.putblobdata(lpath, "container", "blob", "blockid", 0, 4,
                            flock, None)
        except Exception:
            pytest.fail("unexpected Exception raised")
        m.get("mock://blobepcontainer/blob?saskey", status_code=200)
        try:
            bcw.getblobrange(lpath, "container", "blob", 0, 4, flock, None)
        except Exception:
            pytest.fail("unexpected Exception raised")
        # test zero-length putblob
        bcw.putblobdata(lpath, "container", "blob", "blockid", 0, 0,
                        flock, None)
        bcw._pageblob = True
        bcw.putblobdata(lpath, "container", "blob", "blockid", 0, 0,
                        flock, None)
        # test empty page
        with open(lpath, "wb") as f:
            f.write(b"\0" * 4 * 1024 * 1024)
        bcw.putblobdata(lpath, "container", "blob", "blockid",
                        0, 4 * 1024 * 1024, flock, None)
        with open(lpath, "wb") as f:
            f.write(b"\0" * 4 * 1024)
        bcw.putblobdata(lpath, "container", "blob", "blockid",
                        0, 4 * 1024, flock, None)

    sa_in_queue.put((lpath, "container", "blob", "blockid", 0, 4, flock, None))
    with requests_mock.mock() as m:
        sbs = blobxfer.SasBlobService("mock://blobep", "saskey", None)
        bcw = blobxfer.BlobChunkWorker(
            exc_list, sa_in_queue, sa_out_queue, args, sbs, False)
        m.get("mock://blobepcontainer/blob?saskey", status_code=201)
        bcw.run()
        assert len(exc_list) > 0
def test_scihub_unresponsive():
    timeout_connect = 6
    timeout_read = 6.6
    timeout = (timeout_connect, timeout_read)

    api = SentinelAPI("mock_user", "mock_password", timeout=timeout)

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY, requests_mock.ANY,
                     exc=requests.exceptions.ConnectTimeout)
        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.query(**_small_query)

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ConnectTimeout):
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])

    with requests_mock.mock() as rqst:
        rqst.request(requests_mock.ANY, requests_mock.ANY,
                     exc=requests.exceptions.ReadTimeout)
        with pytest.raises(requests.exceptions.ReadTimeout):
            api.query(**_small_query)

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.download('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

        with pytest.raises(requests.exceptions.ReadTimeout):
            api.download_all(['8df46c9e-a20c-43db-a19a-4240c2ed3b8b'])
def test_gen_responses(self):
    step = 3000
    start = 1426120000
    end = 1426147000
    groups1 = [[self.metric1, self.metric2]]
    payload = self.bfc.gen_payload(start, end, 'FULL')
    endpoint = self.bfc.get_multi_endpoint(self.finder.bf_query_endpoint,
                                           self.finder.tenant)
    # test 401 error
    with requests_mock.mock() as m:
        m.post(endpoint, json={}, status_code=401)
        responses = self.bfc.gen_responses(groups1, payload)
        self.assertTrue(responses == [])

    # test single group
    _, responses = self.make_data(start, step)
    with requests_mock.mock() as m:
        m.post(endpoint, json={'metrics': responses}, status_code=200)
        new_responses = self.bfc.gen_responses(groups1, payload)
        self.assertTrue(responses == new_responses)

    # test multiple groups
    groups2 = [[self.metric1], [self.metric2]]
    with requests_mock.mock() as m:
        global json_data
        json_data = [{'metrics': responses[0:1]},
                     {'metrics': responses[1:]}]

        def json_callback(request, context):
            global json_data
            response = json_data[0]
            json_data = json_data[1:]
            return response

        m.post(endpoint, json=json_callback, status_code=200)
        new_responses = self.bfc.gen_responses(groups2, payload)
        self.assertTrue(responses == new_responses)
def test_blobchunkworker_run(tmpdir):
    lpath = str(tmpdir.join('test.tmp'))
    with open(lpath, 'wt') as f:
        f.write(str(uuid.uuid4()))
    exc_list = []
    sa_in_queue = queue.Queue()
    sa_out_queue = queue.Queue()
    flock = threading.Lock()
    sa_in_queue.put((True, lpath, 'blobep', 'saskey', 'container',
                     'blob', 'blockid', 0, 4, flock, None))
    sa_in_queue.put((False, lpath, 'blobep', 'saskey', 'container',
                     'blob', 'blockid', 0, 4, flock, None))
    args = MagicMock()
    args.pageblob = False
    args.autovhd = False
    args.timeout = None

    session = requests.Session()
    adapter = requests_mock.Adapter()
    session.mount('mock', adapter)
    with requests_mock.mock() as m:
        m.put('mock://blobepcontainer/blob?saskey', status_code=201)
        sbs = blobxfer.SasBlobService('mock://blobep', 'saskey', None)
        bcw = blobxfer.BlobChunkWorker(
            exc_list, sa_in_queue, sa_out_queue, args, sbs)
        try:
            bcw.putblobdata(lpath, 'container', 'blob', 'blockid', 0, 4,
                            flock, None)
        except Exception:
            pytest.fail('unexpected Exception raised')
        m.get('mock://blobepcontainer/blob?saskey', status_code=200)
        try:
            bcw.getblobrange(lpath, 'container', 'blob', 0, 4, flock, None)
        except Exception:
            pytest.fail('unexpected Exception raised')
        m.get('mock://blobepcontainer/blob?saskey', status_code=201)
        bcw.run()
        assert len(exc_list) > 0

    exc_list = []
    sa_in_queue = queue.Queue()
    sa_out_queue = queue.Queue()
    sa_in_queue.put((True, lpath, 'blobep', 'saskey', 'container',
                     'blob', 'blockid', 0, 4, flock, None))
    sa_in_queue.put((False, lpath, 'blobep', 'saskey', 'container',
                     'blob', 'blockid', 0, 4, flock, None))
    args.pageblob = True
    with requests_mock.mock() as m:
        m.put('mock://blobepcontainer/blob?saskey', status_code=200)
        sbs = blobxfer.SasBlobService('mock://blobep', 'saskey', None)
        bcw = blobxfer.BlobChunkWorker(
            exc_list, sa_in_queue, sa_out_queue, args, sbs)
        with pytest.raises(IOError):
            bcw.putblobdata(lpath, 'container', 'blob', 'blockid', 0, 4,
                            flock, None)
def test_cache(self):
    so = get_html_doc('cmdextract', 'stackoverflow.com')
    query = ['du']
    options = ['--no-cache']
    with requests_mock.mock() as m:
        self._setup_request_mock(m, query, [so])
        main(query + options)
        self._check_output(contains='du')
        self._truncate_stdout()
    with requests_mock.mock() as m:
        # no responses registered here, so the result must come from the cache
        main(query)
        self._check_output(contains='du')
def test_verbose(self):
    so = get_html_doc('cmdextract', 'stackoverflow.com')
    query = ['du']
    options = ['-v']
    with requests_mock.mock() as m:
        self._setup_request_mock(m, query, [so])
        main(query)
        non_verbose = self._get_current_stdout()
        self._truncate_stdout()
    with requests_mock.mock() as m:
        main(query + options)
        self.assertGreater(len(self._get_current_stdout()),
                           len(non_verbose))
def test_get(self):
    doc = get_html_doc('search_engines', 'google.com')
    req = download.Request(doc.url.url)
    with requests_mock.mock() as m:
        m.get(req.url, content=doc.body)
        resp = download.get(req)
        self.assertIsInstance(resp, download.HtmlDocument)
        self.assertEqual(resp.body, doc.body)
    with requests_mock.mock() as m:
        # no URI registered: the request fails and get() returns a DownloadError
        resp = download.get(req)
        self.assertIsInstance(resp, download.DownloadError)
def test_max_hits(self):
    so = get_html_doc('cmdextract', 'stackoverflow.com')
    query = ['du']
    options = ['--max-hits', '1']
    with requests_mock.mock() as m:
        self._setup_request_mock(m, query, [so])
        main(query)
        nr_lines = len(self._get_current_stdout().split('\n'))
        self._truncate_stdout()
    with requests_mock.mock() as m:
        main(query + options)
        self.assertGreater(
            nr_lines, len(self._get_current_stdout().split('\n')))
def test_fetch_auth_key(self):
    fak = PhishNetAPI(api_key='foo')
    with requests_mock.mock() as m:
        m._adapter.register_uri('GET', 'https://api.phish.net/api.json', [
            {'text': '{"success": "0"}', 'status_code': 200},
        ])
        self.assertRaises(AuthError, fak.fetch_auth_key, 'wilson')
    with requests_mock.mock() as m:
        m._adapter.register_uri('GET', 'https://api.phish.net/api.json', [
            {'text': '{"success": "0"}', 'status_code': 200},
        ])
        m._adapter.register_uri('POST', 'https://api.phish.net/api.json', [
            {'text': '{"success": "1", "authkey": "232342342342"}',
             'status_code': 200}
        ])
        self.assertEqual('232342342342',
                         fak.fetch_auth_key('wilson', 'password'))
def test_freezer_caching(self):
    expected1 = '''\
# File managed by freeze command from buildout_helpers
# Changes will be overwritten
# ETAG: example
# ORIGIN: http://example.com/buildout.cfg
[buildout]'''
    cfg = self.given_a_file_in_test_dir('buildout.cfg', '''\
[buildout]
extends=
    http://example.com/buildout.cfg
''')
    with requests_mock.mock() as m:
        m.get('http://example.com/buildout.cfg',
              text='''[buildout]''',
              headers={'Etag': 'example'})
        freeze(Config(cfg))
        m.get('http://example.com/buildout.cfg',
              text='''''',
              status_code=304)
        freeze(Config(cfg))
        last_call = m.request_history[-1]
        self.assertEqual('example',
                         last_call._request.headers['If-None-Match'])
    abs_dir, _ = os.path.split(cfg)
    new_file_contents = open(
        os.path.join(abs_dir, 'external_buildouts',
                     'example.com_buildout.cfg'), 'r').read()
    self.assertEqual(new_file_contents, expected1)
def test_call_wrapper_zero_results(self):
    full_url = self.url % 'zero'
    with requests_mock.mock() as m:
        m.get(full_url, text=zero_results)
        obs = _call_wrapper(full_url)
    self.assertEqual(obs, {})
def test_call_wrapper_exceeded_limit(self):
    full_url = self.url % 'exceeded'
    with requests_mock.mock() as m:
        m.get(full_url, text=over_query_limit)
        with self.assertRaises(GoogleAPILimitExceeded):
            _call_wrapper(full_url)
def test_api_authorize(self):
    aa_test = PhishNetAPI(api_key='foo')
    with requests_mock.mock() as m:
        m._adapter.register_uri('POST', 'https://api.phish.net/api.json', [
            {'text': '{"success": "0"}', 'status_code': 200},
        ])
        self.assertRaises(AuthError, aa_test.api_authorize,
                          'wilson', 'password')
def test_call_wrapper_request_denied(self):
    full_url = self.url % 'denied'
    with requests_mock.mock() as m:
        m.get(full_url, text=request_denied)
        with self.assertRaises(GoogleAPIRequestDenied):
            _call_wrapper(full_url)
def test_raises_exception_when_gateway_response_indicates_error(self):
    with requests_mock.mock() as m:
        m.post("https://www.dummy-address.com/sendMessages", status_code=500)
        client = self.create_client()
        with self.assertRaises(itcsmsgwclient.GatewayError):
            response = client.send([])
def test_get_all_docker_containers_info_returns_list_of_container_info_objects(self):
    with requests_mock.mock() as m:
        url = '{}api/v{}/docker/'.format(self.URL, self.API)
        m.get(url, text='[{}, {}]', status_code=200)
        docker = self.c.get_all_docker_containers_info()
        self.assertIsInstance(docker, list)
        self.assertTrue(all(isinstance(x, ContainerInfo) for x in docker))
def test_get_subcontainers_info_returns_list_of_container_info_objects(self):
    with requests_mock.mock() as m:
        url = '{}api/v{}/subcontainers/test'.format(self.URL, self.API)
        m.get(url, text='[{}, {}]', status_code=200)
        subcontainers = self.c.get_subcontainers_info('test')
        self.assertIsInstance(subcontainers, list)
        self.assertTrue(all(isinstance(x, ContainerInfo) for x in subcontainers))
def test_download_all(tmpdir):
    api = SentinelAPI(**_api_auth)
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    filenames = ["S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
                 "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
                 "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"]

    api.load_query(" OR ".join(filenames))
    assert len(api.get_products()) == len(filenames)

    # Download normally
    result = api.download_all(str(tmpdir))
    assert len(result) == len(filenames)
    for path, product_info in result.items():
        pypath = py.path.local(path)
        assert pypath.purebasename in filenames
        assert pypath.check(exists=1, file=1)
        assert pypath.size() == product_info["size"]

    # Force one download to fail
    path, product_info = list(result.items())[0]
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')/?$format=json" % product_info["id"]
        json = api.session.get(url).json()
        json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=json)
        result = api.download_all(str(tmpdir), max_attempts=1, checksum=True)
        assert len(result) == len(filenames)
        assert result[path] is None
def test_call_wrapper_ok(self):
    full_url = self.url % 'ok'
    with requests_mock.mock() as m:
        m.get(full_url, text=ok)
        obs = _call_wrapper(full_url)
    exp = loads(ok)['results']
    self.assertEqual(obs, exp)
def test_crawl(self, app, gh_repo_mock):
    with requests_mock.mock() as r_mock:
        r_mock.get(gh_repo_mock.heartsucker_url,
                   text=json.dumps(gh_repo_mock.heartsucker_url_json))
        r_mock.get(gh_repo_mock.bl_org_url,
                   text=json.dumps(gh_repo_mock.bl_org_url_json))
        res = self.crawl(app)
        assert not res, 'Failed paths'
def query_test(query_pattern, jdata, qlen, search_results):
    with requests_mock.mock() as m:
        query = FindQuery(query_pattern, 1, 2)
        m.get(endpoint, json=jdata, status_code=200)
        metrics = self.finder.find_nodes(query)
        self.assertSequenceEqual(map(get_path, list(metrics)),
                                 map(get_start(qlen), search_results))
def test_server_use_server_version_flag(self):
    with open(SERVER_INFO_25_XML, 'rb') as f:
        si_response_xml = f.read().decode('utf-8')
    with requests_mock.mock() as m:
        m.get('http://test/api/2.4/serverInfo', text=si_response_xml)
        server = TSC.Server('http://test', use_server_version=True)
        self.assertEqual(server.version, '2.5')
def test_import_netcdf_dataset_incomplete(import_job_data, tmp_file_data, dataset_import_data):
    import_job_data = copy.copy(import_job_data)
    import_job_data['message'] = json.dumps({'next_uri': '/datasets/import/a1b2c3/overview/'})

    with requests_mock.mock() as m:
        m.post('https://databasin.org/uploads/upload-temporary-file/',
               text=json.dumps({'uuid': 'abcd'}))
        m.get('https://databasin.org/api/v1/uploads/temporary-files/abcd/',
              text=json.dumps(tmp_file_data))
        m.post('https://databasin.org/api/v1/jobs/',
               headers={'Location': 'https://databasin.org/api/v1/jobs/1234/'})
        m.get('https://databasin.org/api/v1/jobs/1234/',
              text=json.dumps(import_job_data))
        m.get('https://databasin.org/api/v1/dataset_imports/a1b2c3/',
              text=json.dumps(dataset_import_data))
        m.delete('https://databasin.org/api/v1/dataset_imports/a1b2c3/')

        f = six.BytesIO()
        with zipfile.ZipFile(f, 'w') as zf:
            zf.writestr('test.nc', '')
            zf.writestr('style.json', '')
        f.seek(0)

        with mock.patch.object(builtins, 'open', mock.Mock(return_value=f)) as open_mock:
            c = Client()
            c._session.cookies['csrftoken'] = 'abcd'
            with pytest.raises(DatasetImportError):
                c.import_netcdf_dataset('test.zip')

        assert m.call_count == 6
def test_call_wrapper_404(self):
    full_url = self.url % '404'
    with requests_mock.mock() as m:
        m.get(full_url, text='', status_code=404)
        with self.assertRaises(IOError):
            _call_wrapper(full_url)
def test_import_netcdf_dataset_with_zip(import_job_data, dataset_data, tmp_file_data):
    with requests_mock.mock() as m:
        m.post('https://databasin.org/uploads/upload-temporary-file/',
               text=json.dumps({'uuid': 'abcd'}))
        m.get('https://databasin.org/api/v1/uploads/temporary-files/abcd/',
              text=json.dumps(tmp_file_data))
        m.post('https://databasin.org/api/v1/jobs/',
               headers={'Location': 'https://databasin.org/api/v1/jobs/1234/'})
        m.get('https://databasin.org/api/v1/jobs/1234/',
              text=json.dumps(import_job_data))
        m.get('https://databasin.org/api/v1/datasets/a1b2c3/',
              text=json.dumps(dataset_data))

        f = six.BytesIO()
        with zipfile.ZipFile(f, 'w') as zf:
            zf.writestr('test.nc', '')
            zf.writestr('style.json', '')
        f.seek(0)

        with mock.patch.object(builtins, 'open', mock.Mock(return_value=f)) as open_mock:
            c = Client()
            c._session.cookies['csrftoken'] = 'abcd'
            dataset = c.import_netcdf_dataset('test.zip')

        open_mock.assert_called_once_with('test.zip', 'a+b')
        assert m.call_count == 5
        assert dataset.id == 'a1b2c3'

        request_data = json.loads(m.request_history[2].text)
        assert request_data['job_name'] == 'create_import_job'
        assert request_data['job_args']['file'] == 'abcd'
        assert request_data['job_args']['dataset_type'] == 'NetCDF_Native'
def test_call_wrapper_invalid_request(self):
    full_url = self.url % 'invalid'
    with requests_mock.mock() as m:
        m.get(full_url, text=invalid_request)
        with self.assertRaises(GoogleAPIInvalidRequest):
            _call_wrapper(full_url)
def test_call_wrapper_unknown_error(self):
    full_url = self.url % 'unknown'
    with requests_mock.mock() as m:
        m.get(full_url, text=unknown_error)
        with self.assertRaises(IOError):
            _call_wrapper(full_url)
def test_fragments_by_id__usingwebservice_withsomebadid(base_url):
    with requests_mock.mock() as m:
        url = base_url + '/fragments?fragment_ids=2n2k_MTN_frag1,foo-bar'
        # TODO use value mol
        fragments = [{
            'nr_r_groups': 0,
            'smiles': 'CC1(C)C=C(C[S-])C(C)(C)[NH+]1O',
            'pdb_code': '2n2k',
            'atom_codes': 'O1,N1,C1,C2,C3,C4,S1,C5,C6,C7,C8,C9',
            'het_code': 'MTN',
            'hash_code': 'd491952cd7c9dc30',
            'frag_nr': 1,
            'frag_id': '2n2k_MTN_frag1',
            'rowid': 175992,
            'het_chain': 'A',
            'het_seq_nr': 101,
            'prot_chain': 'A',
            'uniprot_acc': 'P0CG48',
            'uniprot_name': 'Polyubiquitin-C',
            'prot_name': 'ubiquitin',
            'ec_number': None,
            'mol': None,
            'pdb_title': 'Ensemble structure of the closed state of Lys63-linked diubiquitin in the absence of a ligand',
        }]
        body = {
            'detail': "Fragment with identifier 'foo-bar' not found",
            'absent_identifiers': ['foo-bar'],
            'fragments': fragments,
            'status': 404,
            'title': 'Not Found',
            'type': 'about:blank'
        }
        m.get(url, json=body, status_code=404,
              headers={'Content-Type': 'application/problem+json'})

        with pytest.raises(IncompleteFragments) as e:
            frag_ids = pd.Series(['2n2k_MTN_frag1', 'foo-bar'])
            fragments_by_id(frag_ids, base_url)

        assert_frame_equal(pd.DataFrame(fragments), e.value.fragments)
        assert e.value.absent_identifiers == ['foo-bar']
def test_freezer_nested(self):
    cfg = self.given_a_file_in_test_dir('buildout.cfg', '''\
[buildout]
extends=
    http://example.com/buildout.cfg
''')
    expected1 = '''\
[buildout]
extends=
    external_buildouts/example.com_buildout.cfg
'''
    expected2 = '''\
# File managed by freeze command from buildout_helpers
# Changes will be overwritten
# ETAG: None
# ORIGIN: http://example.com/buildout.cfg
[buildout]
extends=
    example.com_buildout2.cfg
'''
    with requests_mock.mock() as m:
        m.get('http://example.com/buildout.cfg', text='''[buildout]
extends=
    buildout2.cfg
''')
        m.get('http://example.com/buildout2.cfg', text='''[buildout]''')
        freeze(Config(cfg))
    abs_dir, _ = os.path.split(cfg)
    new_file_contents = open(
        os.path.join(abs_dir, 'external_buildouts',
                     'example.com_buildout.cfg'), 'r').read()
    old_file_contents = open(cfg, 'r').read()
    self.assertEqual(old_file_contents, expected1)
    self.assertEqual(new_file_contents, expected2)
def test_login(self):
    with requests_mock.mock() as m:
        login_url = self.app._get_path(constants.LOGIN_PATH, self.server)
        m.post(login_url, text='{"token":"test"}')
        self.app.login(self.server, 'admin', 'nutanix/4u')
        self.assertNotEqual(self.app.auth_token, None)
        self.assertEqual(self.app.auth_token, 'test')
def test_populate_favorites(self):
    self.server.version = '2.5'
    baseurl = self.server.favorites.baseurl
    single_user = TSC.UserItem('test', 'Interactor')
    with open(GET_FAVORITES_XML, 'rb') as f:
        response_xml = f.read().decode('utf-8')
    with requests_mock.mock() as m:
        m.get('{0}/{1}'.format(baseurl, single_user.id), text=response_xml)
        self.server.users.populate_favorites(single_user)

    self.assertIsNotNone(single_user._favorites)
    self.assertEqual(len(single_user.favorites['workbooks']), 1)
    self.assertEqual(len(single_user.favorites['views']), 1)
    self.assertEqual(len(single_user.favorites['projects']), 1)
    self.assertEqual(len(single_user.favorites['datasources']), 1)

    workbook = single_user.favorites['workbooks'][0]
    view = single_user.favorites['views'][0]
    datasource = single_user.favorites['datasources'][0]
    project = single_user.favorites['projects'][0]

    self.assertEqual(workbook.id, '6d13b0ca-043d-4d42-8c9d-3f3313ea3a00')
    self.assertEqual(view.id, 'd79634e1-6063-4ec9-95ff-50acbf609ff5')
    self.assertEqual(datasource.id, 'e76a1461-3b1d-4588-bf1b-17551a879ad9')
    self.assertEqual(project.id, '1d0304cd-3796-429f-b815-7258370b9b74')
def test_get_last_output_files(self):
    with requests_mock.mock() as mock:
        path = "/data/entities/entity-01/last-output-files"
        mock.get(gazu.client.get_full_url(path), text=json.dumps({
            "output-type-01": {"main": {"id": "output-file-1"}},
            "output-type-02": {"main": {"id": "output-file-7"}}
        }))
        entity = {"id": "entity-01"}
        output_files_dict = gazu.files.get_last_output_files(entity)
        self.assertEqual(
            output_files_dict["output-type-01"]["main"]["id"],
            "output-file-1")
        self.assertEqual(
            output_files_dict["output-type-02"]["main"]["id"],
            "output-file-7")
def test_update_subst(subst1, subst2, spec1, spec2):
    with requests_mock.mock() as mock:
        # Given
        mock.get(requests_mock.ANY, text=SUBST_STREAM, headers={
            'Content-Type': 'text/html',
            'Charset': 'ISO-8859-1'
        })
        subst_updater = SubstanceUpdater()

        # When
        subst_updater.execute()
        subst_list = Substance.all()

        # Then
        assert len(subst_list) == 3
        assert not subst_list[0]['deleted_at']
        assert not subst_list[1]['deleted_at']
        assert subst_list[0]['_id'] == '42215'
        assert subst_list[1]['_id'] == '49632'
        assert subst_list[2]['_id'] == '86571'
        assert subst_list[1]['created_at'] == '2016-12-15T00:00:00'
        assert subst_list[2]['deleted_at'] == '2016-12-15T00:00:00'
def test_backpressure(self):
    """
    Checks that we catch and apply backpressure delay correctly
    from 503 status codes.
    """
    supervisor = Supervisor()

    with requests_mock.mock() as m:
        m.post('%s/%s/agent/' % (DEFAULT_API_URL, DEFAULT_API_KEY),
               status_code=503, text='60.0')

        now = time.time()

        # talk to get delay
        try:
            supervisor.talk_to_cloud(force=True)
        except AmplifyCriticalException:
            pass

        # check that context.backpressure_time was changed
        assert_that(context.backpressure_time,
                    not_(equal_to(self.old_backpressure_time)))
        assert_that(context.backpressure_time,
                    greater_than_or_equal_to(int(now + 60.0)))
def test_backpressure_ordinal_503(self):
    """
    Checks that the agent doesn't crash on non-formalized 503
    """
    supervisor = Supervisor()

    with requests_mock.mock() as m:
        m.post('%s/%s/agent/' % (DEFAULT_API_URL, DEFAULT_API_KEY),
               status_code=503, text='foo')

        now = time.time()

        # talk to get delay
        try:
            supervisor.talk_to_cloud(force=True)
        except AmplifyCriticalException:
            pass

        # check that context.backpressure_time was changed to default 60
        assert_that(context.backpressure_time,
                    not_(equal_to(self.old_backpressure_time)))
        assert_that(context.backpressure_time,
                    greater_than_or_equal_to(int(now + 60.0)))
def test_talk_to_cloud(self):
    """
    Checks that we apply all changes from cloud to agent config
    and object configs
    """
    supervisor = Supervisor()

    with requests_mock.mock() as m:
        m.post(
            '%s/%s/agent/' % (DEFAULT_API_URL, DEFAULT_API_KEY),
            text='{"config": {"cloud": {"push_interval": 30.0, "talk_interval": 60.0, "api_timeout": 10.0}, "containers": {"nginx": {"parse_delay": 60.0, "max_test_duration": 30.0, "run_test": true, "poll_intervals": {"metrics": 20.0, "configs": 20.0, "meta": 30.0, "discover": 10.0, "logs": 10.0}, "upload_ssl": true, "upload_config": true}, "system": {"poll_intervals": {"metrics": 20.0, "meta": 30.0, "discover": 10.0}}}}, "objects": [{"object":{"type":"nginx", "local_id": "b636d4376dea15405589692d3c5d3869ff3a9b26b0e7bb4bb1aa7e658ace1437"}, "config":{"upload_ssl":true}, "filters":[ {"metric": "nginx.http.method.post", "filter_rule_id": 9, "data": [["$request_uri", "~", "/api/timeseries"]]} ] }], "messages": [], "versions": {"current": 0.29, "old": 0.26, "obsolete": 0.21}}'
        )
        supervisor.init_object_managers()
        for container in supervisor.object_managers.itervalues():
            container._discover_objects()

        old_object_configs = deepcopy(
            supervisor.object_managers['nginx'].object_configs)
        old_restart_time = supervisor.last_cloud_talk_restart

        supervisor.talk_to_cloud(force=True)

        for container in supervisor.object_managers.itervalues():
            container._discover_objects()

        assert_that(supervisor.last_cloud_talk_restart,
                    not_(equal_to(old_restart_time)))

        # check that agent config was changed
        assert_that(context.app_config.config,
                    not_(equal_to(self.old_cloud_config)))

        # check that object configs were also changed
        nginx_container = supervisor.object_managers['nginx']
        assert_that(nginx_container.object_configs,
                    not_(equal_to(old_object_configs)))
def test_product_node_download_single(run_cli, api, tmpdir, smallest_online_products, monkeypatch):
    # Change default arguments for quicker test.
    # Also, vcrpy is not threadsafe, so only one worker is used.
    monkeypatch.setattr(
        "sentinelsat.SentinelProductsAPI.download_all",
        partialmethod(SentinelProductsAPI.download_all,
                      max_attempts=2, n_concurrent_dl=1),
    )

    product_id = smallest_online_products[0]["id"]
    command = ["--uuid", product_id, "--download", "--path", str(tmpdir)]
    run_cli(*command)

    # The file already exists, should not be re-downloaded
    run_cli(*command)

    # clean up
    for f in tmpdir.listdir():
        f.remove()

    # Prepare a response with an invalid checksum
    url = "https://apihub.copernicus.eu/apihub/odata/v1/Products('%s')?$format=json" % product_id
    json = api.session.get(url).json()
    json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"

    # Force the download to fail by providing an incorrect checksum
    with requests_mock.mock(real_http=True) as rqst:
        rqst.get(url, json=json)
        # md5 flag set (implicitly), should raise an exception
        run_cli(*command, must_raise=InvalidChecksumError)

    # clean up
    tmpdir.remove()
def test_get_remote_node_ggr(driver_wrapper, utils):
    # Configure mock
    driver_wrapper.driver.session_id = '5af'
    grid_url = 'http://{}:{}/grid/api/testsession?session={}'.format(
        'localhost', 4444, '5af')
    ggr_url = 'http://{}:{}/host/{}'.format('localhost', 4444, '5af')
    ggr_response_json = {
        'Count': 3,
        'Username': '',
        'Scheme': '',
        'VNC': '',
        'Name': 'host_name',
        'Password': '',
        'Port': 4500
    }
    with requests_mock.mock() as req_mock:
        req_mock.get(grid_url, text='non_json_response')
        req_mock.get(ggr_url, json=ggr_response_json)

        # Get remote node and check result
        assert utils.get_remote_node() == ('ggr', 'host_name')
        assert grid_url == req_mock.request_history[0].url
        assert ggr_url == req_mock.request_history[1].url
def test_update_spec(spec1, spec2):
    with requests_mock.mock() as mock:
        # Given
        mock.get(requests_mock.ANY, text=SPEC_STREAM, headers={
            'Content-Type': 'text/html',
            'Charset': 'ISO-8859-1'
        })
        spec_updater = SpecialityUpdater()

        # When
        spec_updater.execute()
        spec_list = Speciality.all()

        # Then
        assert len(spec_list) == 3
        assert not spec_list[0]['deleted_at']
        assert not spec_list[1]['deleted_at']
        assert spec_list[0]['_id'] == '61266250'
        assert spec_list[1]['_id'] == '66513085'
        assert spec_list[2]['_id'] == '64332894'
        assert spec_list[1]['created_at'] == '2016-12-12T00:00:00'
        assert spec_list[2]['deleted_at'] == '2016-12-12T00:00:00'
def test_update_tags(self):
    add_tags_xml, update_xml = read_xml_assets(ADD_TAGS_XML, UPDATE_XML)
    with requests_mock.mock() as m:
        m.put(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/tags',
              text=add_tags_xml)
        m.delete(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/tags/b',
                 status_code=204)
        m.delete(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb/tags/d',
                 status_code=204)
        m.put(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb',
              text=update_xml)
        single_datasource = TSC.DatasourceItem('1d0304cd-3796-429f-b815-7258370b9b74')
        single_datasource._id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb'
        single_datasource._initial_tags.update(['a', 'b', 'c', 'd'])
        single_datasource.tags.update(['a', 'c', 'e'])
        updated_datasource = self.server.datasources.update(single_datasource)

    self.assertEqual(single_datasource.tags, updated_datasource.tags)
    self.assertEqual(single_datasource._initial_tags,
                     updated_datasource._initial_tags)
def test_create_daily(self):
    with open(CREATE_DAILY_XML, "rb") as f:
        response_xml = f.read().decode("utf-8")
    with requests_mock.mock() as m:
        m.post(self.baseurl, text=response_xml)
        daily_interval = TSC.DailyInterval(time(4, 50))
        new_schedule = TSC.ScheduleItem(
            "daily-schedule-1", 90,
            TSC.ScheduleItem.Type.Subscription,
            TSC.ScheduleItem.ExecutionOrder.Serial,
            daily_interval)
        new_schedule = self.server.schedules.create(new_schedule)

    self.assertEqual("907cae38-72fd-417c-892a-95540c4664cd", new_schedule.id)
    self.assertEqual("daily-schedule-1", new_schedule.name)
    self.assertEqual("Active", new_schedule.state)
    self.assertEqual(90, new_schedule.priority)
    self.assertEqual("2016-09-15T21:01:09Z", new_schedule.created_at)
    self.assertEqual("2016-09-15T21:01:09Z", new_schedule.updated_at)
    self.assertEqual(TSC.ScheduleItem.Type.Subscription,
                     new_schedule.schedule_type)
    self.assertEqual("2016-09-16T11:45:00Z", new_schedule.next_run_at)
    self.assertEqual(TSC.ScheduleItem.ExecutionOrder.Serial,
                     new_schedule.execution_order)
    self.assertEqual(time(4, 45), new_schedule.interval_item.start_time)
def test_time_com_id(self):
    # Arrange and Act
    with requests_mock.mock() as m:
        url = '{api_url}/time/id/{id}'.format(api_url=self.api_url, id=471815)
        m.get(url, text=self.TIME)
        time = self.api.time(id=471815)
        primeiro_atleta = time.atletas[0]

    # Assert
    self.assertIsInstance(time, Time)
    self.assertEqual(time.patrimonio, 0)
    self.assertEqual(time.valor_time, 0)
    self.assertEqual(time.ultima_pontuacao, 70.02978515625)
    self.assertIsInstance(time.atletas, list)
    self.assertIsInstance(primeiro_atleta, Atleta)
    self.assertEqual(primeiro_atleta.id, 38140)
    self.assertEqual(primeiro_atleta.apelido, 'Fernando Prass')
    self.assertEqual(primeiro_atleta.pontos, 7.5)
    self.assertEqual(primeiro_atleta.scout, {'DD': 3, 'FS': 1, 'GS': 1})
    self.assertEqual(primeiro_atleta.posicao, _posicoes[1])
    self.assertIsInstance(primeiro_atleta.clube, Clube)
    self.assertEqual(primeiro_atleta.clube.id, 275)
    self.assertEqual(primeiro_atleta.clube.nome, 'Palmeiras')
    self.assertEqual(primeiro_atleta.clube.abreviacao, 'PAL')
    self.assertEqual(primeiro_atleta.status, _atleta_status[7])
    self.assertIsInstance(time.info, TimeInfo)
    self.assertEqual(time.info.id, 471815)
    self.assertEqual(time.info.nome, 'Falydos FC')
    self.assertEqual(time.info.nome_cartola, 'Vicente Neto')
    self.assertEqual(time.info.slug, 'falydos-fc')
    self.assertTrue(time.info.assinante)
def test_get_by_id_personal(self):
    # workbooks in personal space don't have project_id or project_name
    with open(GET_BY_ID_XML_PERSONAL, 'rb') as f:
        response_xml = f.read().decode('utf-8')
    with requests_mock.mock() as m:
        m.get(self.baseurl + '/3cc6cd06-89ce-4fdc-b935-5294135d6d43',
              text=response_xml)
        single_workbook = self.server.workbooks.get_by_id(
            '3cc6cd06-89ce-4fdc-b935-5294135d6d43')

    self.assertEqual('3cc6cd06-89ce-4fdc-b935-5294135d6d43', single_workbook.id)
    self.assertEqual('SafariSample', single_workbook.name)
    self.assertEqual('SafariSample', single_workbook.content_url)
    self.assertEqual('http://tableauserver/#/workbooks/2/views',
                     single_workbook.webpage_url)
    self.assertEqual(False, single_workbook.show_tabs)
    self.assertEqual(26, single_workbook.size)
    self.assertEqual('2016-07-26T20:34:56Z',
                     format_datetime(single_workbook.created_at))
    self.assertEqual('description for SafariSample',
                     single_workbook.description)
    self.assertEqual('2016-07-26T20:35:05Z',
                     format_datetime(single_workbook.updated_at))
    self.assertTrue(single_workbook.project_id)
    self.assertIsNone(single_workbook.project_name)
    self.assertEqual('5de011f8-5aa9-4d5b-b991-f462c8dd6bb7',
                     single_workbook.owner_id)
    self.assertEqual(set(['Safari', 'Sample']), single_workbook.tags)
    self.assertEqual('d79634e1-6063-4ec9-95ff-50acbf609ff5',
                     single_workbook.views[0].id)
    self.assertEqual('ENDANGERED SAFARI', single_workbook.views[0].name)
    self.assertEqual('SafariSample/sheets/ENDANGEREDSAFARI',
                     single_workbook.views[0].content_url)
def test_add_permissions(self):
    with open(UPDATE_PERMISSIONS, 'rb') as f:
        response_xml = f.read().decode('utf-8')

    single_workbook = TSC.WorkbookItem('test')
    single_workbook._id = '21778de4-b7b9-44bc-a599-1506a2639ace'

    bob = UserItem.as_reference("7c37ee24-c4b1-42b6-a154-eaeab7ee330a")
    group_of_people = GroupItem.as_reference(
        "5e5e1978-71fa-11e4-87dd-7382f5c437af")

    new_permissions = [
        PermissionsRule(bob, {'Write': 'Allow'}),
        PermissionsRule(group_of_people, {'Read': 'Deny'})
    ]

    with requests_mock.mock() as m:
        m.put(self.baseurl + "/21778de4-b7b9-44bc-a599-1506a2639ace/permissions",
              text=response_xml)
        permissions = self.server.workbooks.update_permissions(
            single_workbook, new_permissions)

    self.assertEqual(permissions[0].grantee.tag_name, 'group')
    self.assertEqual(permissions[0].grantee.id,
                     '5e5e1978-71fa-11e4-87dd-7382f5c437af')
    self.assertDictEqual(
        permissions[0].capabilities,
        {TSC.Permission.Capability.Read: TSC.Permission.Mode.Deny})
    self.assertEqual(permissions[1].grantee.tag_name, 'user')
    self.assertEqual(permissions[1].grantee.id,
                     '7c37ee24-c4b1-42b6-a154-eaeab7ee330a')
    self.assertDictEqual(
        permissions[1].capabilities,
        {TSC.Permission.Capability.Write: TSC.Permission.Mode.Allow})
def test_update(self):
    with open(UPDATE_XML, 'rb') as f:
        response_xml = f.read().decode('utf-8')
    with requests_mock.mock() as m:
        m.put(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb',
              text=response_xml)
        single_datasource = TSC.DatasourceItem(
            'test', '1d0304cd-3796-429f-b815-7258370b9b74')
        single_datasource.owner_id = 'dd2239f6-ddf1-4107-981a-4cf94e415794'
        single_datasource._id = '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb'
        single_datasource.certified = True
        single_datasource.certification_note = "Warning, here be dragons."
        single_datasource = self.server.datasources.update(single_datasource)

    self.assertEqual('9dbd2263-16b5-46e1-9c43-a76bb8ab65fb', single_datasource.id)
    self.assertEqual('1d0304cd-3796-429f-b815-7258370b9b74',
                     single_datasource.project_id)
    self.assertEqual('dd2239f6-ddf1-4107-981a-4cf94e415794',
                     single_datasource.owner_id)
    self.assertEqual(True, single_datasource.certified)
    self.assertEqual("Warning, here be dragons.",
                     single_datasource.certification_note)
def test_pos_rodada_destaques_com_mercado_aberto(self):
    # Arrange and Act
    with requests_mock.mock() as m:
        url = '{api_url}/mercado/status'.format(api_url=self.api_url)
        m.get(url, text=self.MERCADO_STATUS_ABERTO)
        url = '{api_url}/pos-rodada/destaques'.format(api_url=self.api_url)
        m.get(url, text=self.POS_RODADA_DESTAQUES)
        destaque_rodada = self.api.pos_rodada_destaques()

    # Assert
    self.assertIsInstance(destaque_rodada, DestaqueRodada)
    self.assertEqual(destaque_rodada.media_cartoletas, 115.8235753058391)
    self.assertEqual(destaque_rodada.media_pontos, 46.6480728839843)
    self.assertIsInstance(destaque_rodada.mito_rodada, TimeInfo)
    self.assertEqual(destaque_rodada.mito_rodada.id, 896224)
    self.assertEqual(destaque_rodada.mito_rodada.nome, 'gama campos fc')
    self.assertEqual(destaque_rodada.mito_rodada.nome_cartola, 'malmal')
    self.assertEqual(destaque_rodada.mito_rodada.slug, 'gama-campos-fc')
    self.assertFalse(destaque_rodada.mito_rodada.assinante)
def test_create_ticket(notify_api):
    def match_json(request):
        expected = {
            'product_id': 42,
            'subject': 'Ask a question',
            'description': 'my message',
            'email': '*****@*****.**',
            'priority': 1,
            'status': 2,
            'tags': []
        }

        encoded_auth = base64.b64encode(b'freshdesk-api-key:x').decode('ascii')
        json_matches = request.json() == expected
        basic_auth_header = request.headers.get(
            'Authorization') == f"Basic {encoded_auth}"

        return json_matches and basic_auth_header

    with requests_mock.mock() as rmock:
        rmock.request(
            "POST",
            'https://example.com/freshdesk/api/v2/tickets',
            additional_matcher=match_json,
            status_code=201
        )

        with notify_api.app_context():
            response = Freshdesk.create_ticket({
                'message': 'my message',
                'email': '*****@*****.**',
                'support_type': 'Ask a question',
            })
            assert response == 201
def test_download_all(tmpdir):
    api = SentinelAPI(**_api_auth)
    # From https://scihub.copernicus.eu/apihub/odata/v1/Products?$top=5&$orderby=ContentLength
    filenames = [
        "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E",
        "S1A_WV_OCN__2SSV_20150526T211029_20150526T211737_006097_007E78_134A",
        "S1A_WV_OCN__2SSV_20150526T081641_20150526T082418_006090_007E3E_104C"
    ]
    products = api.load_query(" OR ".join(filenames))
    assert len(products) == len(filenames)

    # Download normally
    result = api.download_all(products, str(tmpdir))
    assert len(result) == len(filenames)
    for path, product_info in result.items():
        pypath = py.path.local(path)
        assert pypath.purebasename in filenames
        assert pypath.check(exists=1, file=1)
        assert pypath.size() == product_info["size"]

    # Force one download to fail
    path, product_info = list(result.items())[0]
    py.path.local(path).remove()
    with requests_mock.mock(real_http=True) as rqst:
        url = "https://scihub.copernicus.eu/apihub/odata/v1/Products('%s')/?$format=json" % product_info["id"]
        json = api.session.get(url).json()
        json["d"]["Checksum"]["Value"] = "00000000000000000000000000000000"
        rqst.get(url, json=json)
        result = api.download_all(products, str(tmpdir), max_attempts=1,
                                  checksum=True)
        assert len(result) == len(filenames)
        assert result[path] is None
def test_job_wait_complete1_success_result():
    """Test wait_for_completion() with successful complete job with a result."""
    with requests_mock.mock() as m:
        mock_server_1(m)
        session = Session('fake-host', 'fake-user', 'fake-pw')
        op_method = 'POST'
        op_uri = '/api/foo'
        job = Job(session, JOB_URI, op_method, op_uri)
        exp_op_result = {
            'foo': 'bar',
        }
        query_job_status_result = {
            'status': 'complete',
            'job-status-code': 200,
            # 'job-reason-code' omitted because HTTP status good
            'job-results': exp_op_result,
        }
        m.get(JOB_URI, json=query_job_status_result)
        m.delete(JOB_URI, status_code=204)

        op_result = job.wait_for_completion()

        assert op_result == exp_op_result
def test_get_by_id(self):
    response_xml = read_xml_asset(GET_BY_ID_XML)
    with requests_mock.mock() as m:
        m.get(self.baseurl + '/9dbd2263-16b5-46e1-9c43-a76bb8ab65fb',
              text=response_xml)
        single_datasource = self.server.datasources.get_by_id(
            '9dbd2263-16b5-46e1-9c43-a76bb8ab65fb')

    self.assertEqual('9dbd2263-16b5-46e1-9c43-a76bb8ab65fb', single_datasource.id)
    self.assertEqual('dataengine', single_datasource.datasource_type)
    self.assertEqual('Sampledatasource', single_datasource.content_url)
    self.assertEqual('2016-08-04T21:31:55Z',
                     format_datetime(single_datasource.created_at))
    self.assertEqual('2016-08-04T21:31:55Z',
                     format_datetime(single_datasource.updated_at))
    self.assertEqual('default', single_datasource.project_name)
    self.assertEqual('Sample datasource', single_datasource.name)
    self.assertEqual('ee8c6e70-43b6-11e6-af4f-f7b0d8e20760',
                     single_datasource.project_id)
    self.assertEqual('5de011f8-5aa9-4d5b-b991-f462c8dd6bb7',
                     single_datasource.owner_id)
    self.assertEqual(set(['world', 'indicators', 'sample']),
                     single_datasource.tags)
def test_update_datasource_default_permission(self) -> None:
    response_xml = read_xml_asset(UPDATE_DATASOURCE_DEFAULT_PERMISSIONS_XML)
    with requests_mock.mock() as m:
        m.put(
            self.baseurl +
            "/b4065286-80f0-11ea-af1b-cb7191f48e45/default-permissions/datasources",
            text=response_xml,
        )

        project = TSC.ProjectItem("test-project")
        project._id = "b4065286-80f0-11ea-af1b-cb7191f48e45"

        group = TSC.GroupItem("test-group")
        group._id = "b4488bce-80f0-11ea-af1c-976d0c1dab39"

        capabilities = {
            TSC.Permission.Capability.ExportXml: TSC.Permission.Mode.Deny
        }

        rules = [
            TSC.PermissionsRule(grantee=group.to_reference(),
                                capabilities=capabilities)
        ]

        new_rules = self.server.projects.update_datasource_default_permissions(
            project, rules)

    self.assertEqual("b4488bce-80f0-11ea-af1c-976d0c1dab39",
                     new_rules[0].grantee.id)

    updated_capabilities = new_rules[0].capabilities
    self.assertEqual(4, len(updated_capabilities))
    self.assertEqual("Deny", updated_capabilities["ExportXml"])
    self.assertEqual("Allow", updated_capabilities["Read"])
    self.assertEqual("Allow", updated_capabilities["Write"])
    self.assertEqual("Allow", updated_capabilities["Connect"])
def prepared_request(self, method, url, body=None, headers=None, raw=False,
                     stream=False):
    headers = self._normalize_headers(headers=headers)

    r_status, r_body, r_headers, r_reason = self._get_request(
        method, url, body, headers)

    with requests_mock.mock() as m:
        m.register_uri(method, url, text=r_body, reason=r_reason,
                       headers=r_headers, status_code=r_status)
        super(MockHttp, self).prepared_request(
            method=method, url=url, body=body, headers=headers, raw=raw,
            stream=stream)
def test_call_api_post_with_assaults_configuration():
    assaults_configuration = {
        "level": 5,
        "latencyRangeStart": 2000,
        "latencyRangeEnd": 5000,
        "latencyActive": True,
        "exceptionsActive": True,
        "killApplicationActive": False,
        "restartApplicationActive": False,
    }
    request_headers = {
        "Accept": "application/json",
        "Content-Type": "application/json"
    }
    with requests_mock.mock() as m:
        m.request(
            "POST",
            "http://localhost:8080/actuator/chaosmonkey/assaults",
            request_headers=request_headers,
            status_code=codes.ok,
            text="Chaos Monkey assaults configuration changed!",
        )
        response = call_api(
            base_url="http://localhost:8080/actuator",
            api_endpoint="chaosmonkey/assaults",
            method="POST",
            assaults_configuration=assaults_configuration,
            timeout=None,
            configuration=None,
            secrets=None,
        )
    assert response.status_code == codes.ok
    assert response.text == "Chaos Monkey assaults configuration changed!"
def test_successful_answer():
    """Test successful answer."""
    expected = b'data'
    with requests_mock.mock() as mock:
        mock.get(URL, content=b'data')
        assert send_request(URL).content == expected
def test_raises_exception_connection_error():
    """Test raise exception when request error."""
    with requests_mock.mock() as mock:
        mock.request(requests_mock.ANY, requests_mock.ANY, exc=RequestException)
        with pytest.raises(RequestException):
            send_request(URL)
def test_conf_broker(self):
    """Test load new conf in broker

    :return: None
    """
    self.setup_with_file('cfg/cfg_default_with_modules.cfg',
                         'cfg/default_with_modules/alignak.ini')

    args = {
        'env_file': self.env_filename,
        'alignak_name': 'my-alignak',
        'daemon_name': 'broker-master',
    }
    broker = brokerdaemon(**args)
    broker.load_modules_manager()
    assert 1 == len(broker.modules)

    broker_link = None
    for satellite in self._arbiter.dispatcher.satellites:
        if satellite.name == 'broker-master':
            broker_link = satellite
            broker.new_conf = satellite.cfg
            break
    assert broker_link is not None

    # Simulate the daemons HTTP interface (very simple simulation !)
    with requests_mock.mock() as mockreq:
        mockreq.get('http://127.0.0.1:7768/ping', json='pong')
        mockreq.get('http://127.0.0.1:7768/get_running_id',
                    json={"running_id": 123456.123456})
        mockreq.get('http://127.0.0.1:7768/fill_initial_broks', json=[])
        mockreq.get('http://127.0.0.1:7768/get_managed_configurations', json={})

        broker.setup_new_conf()

        # Check modules received configuration
        assert 1 == len(broker.modules)
        print("Modules: %s" % broker.modules)
        print(" - : %s" % broker.modules[0].__dict__)
        assert broker.modules[0].module_alias == 'Example'
        assert broker.modules[0].option_1 == 'foo'
        assert broker.modules[0].option_2 == 'bar'
        assert broker.modules[0].option_3 == 'foobar'

        assert len(broker.schedulers) == 1
        if broker_link.manage_arbiters:
            assert len(broker.arbiters) == 1
        else:
            assert len(broker.arbiters) == 0
        assert len(broker.pollers) == 1
        assert len(broker.reactionners) == 1
        assert len(broker.receivers) == 1

    # Send a new conf, so it's the second time. This tests the cleanup.
    self.setup_with_file('cfg/cfg_default_with_modules.cfg',
                         'cfg/default_with_modules/alignak.ini')
    broker_link = None
    for satellite in self._arbiter.dispatcher.satellites:
        if satellite.type == 'broker':
            broker_link = satellite
            broker.new_conf = satellite.cfg
            break
    assert broker_link is not None

    # Simulate the daemons HTTP interface (very simple simulation !)
    with requests_mock.mock() as mockreq:
        mockreq.get('http://127.0.0.1:7768/ping', json='pong')
        mockreq.get('http://127.0.0.1:7768/get_running_id',
                    json={"running_id": 123456.123456})
        mockreq.get('http://127.0.0.1:7768/fill_initial_broks', json=[])
        mockreq.get('http://127.0.0.1:7768/get_managed_configurations', json={})

        broker.setup_new_conf()

        assert len(broker.schedulers) == 1
        if broker_link.manage_arbiters:
            assert len(broker.arbiters) == 1
        else:
            assert len(broker.arbiters) == 0
        assert len(broker.pollers) == 1
        assert len(broker.reactionners) == 1
        assert len(broker.receivers) == 1

    # Stop launched modules
    broker.modules_manager.stop_all()
def rmock():
    with requests_mock.mock() as rmock:
        yield rmock
def test_delete(self):
    with requests_mock.mock() as m:
        m.delete(self.baseurl + '/ee8c6e70-43b6-11e6-af4f-f7b0d8e20760',
                 status_code=204)
        self.server.projects.delete('ee8c6e70-43b6-11e6-af4f-f7b0d8e20760')
def test_validate_all_tolerance_probes():
    with requests_mock.mock() as m:
        m.get("http://example.com", text="you are number 87")
        ensure_experiment_is_valid(experiments.ExperimentWithVariousTolerances)
def mock_site_api(base_url, site):
    with requests_mock.mock() as m:
        m.post(base_url + '/tokens.json', json={'token': u'744cfcfb3cd3'})
        m.get(base_url + '/sites', json=[site])
        yield
        m.put(base_url + '/sites/%s' % site['_id'],
              json={'foo': 'we_do_not_care'})