def test_update_resource_url_hooks(self, app):
    responses.add(
        responses.POST,
        "http://datapusher.ckan.org/job",
        content_type="application/json",
        body=json.dumps({"job_id": "foo", "job_key": "barloco"}),
    )
    responses.add_passthru(config["solr_url"])
    dataset = factories.Dataset()
    resource = call_action(
        "resource_create",
        package_id=dataset['id'],
        url='http://example.com/old.csv',
        format='CSV',
    )
    resource = call_action(
        "resource_update",
        id=resource['id'],
        url='http://example.com/new.csv',
        format='CSV',
    )
    assert resource

def test_third_party_instrumentor(self, telemetry: TelemetryFixture):
    import requests
    from telemetry.api.listeners.span import LabelAttributes, InstrumentorSpanListener

    RequestsInstrumentor().instrument()
    telemetry.initialize()
    telemetry.add_span_processor(InstrumentorSpanListener(
        LabelAttributes('component', 'http.status_code', 'http.method'), 'requests'))

    responses.add_passthru('http://localhost:1234/does_not_exist')

    with telemetry.span('test_category', 'span1', attributes={TestAttributes.LABEL1: 'l1'}) as span:
        try:
            with requests.get('http://localhost:1234/does_not_exist') as response:
                pass
        except requests.exceptions.RequestException:
            # the connection to the nonexistent endpoint is expected to fail
            pass

    telemetry.collect()

    assert telemetry.get_value_recorder(
        name='trace.duration',
        labels={'component': 'http',
                'http.method': 'GET',
                TestAttributes.LABEL1.name: 'l1',
                Attributes.TRACE_CATEGORY.name: 'requests',
                Attributes.TRACE_NAME.name: 'requests.HTTP GET',
                Attributes.TRACE_STATUS.name: 'ERROR'}).count == 1

def task_schema():
    responses.add_passthru("https://community-tc.services.mozilla.com/")
    r = requests.get(
        "https://community-tc.services.mozilla.com/schemas/queue/v1/create-task-request.json"
    )
    r.raise_for_status()
    return r.json()

def test_harvest(app, apiharvester_config_vs, apiharvester_apiresponse_vs):
    """Test harvest cli."""
    runner = CliRunner()
    script_info = ScriptInfo(create_app=lambda info: app)

    # Mock a request response.
    responses.add_passthru(
        re.compile('http://localhost:9200/(.*)')
    )
    url1 = '{url}{static}'.format(
        url=apiharvester_config_vs.get('url'),
        static='/v1/resources.json?start_at=1900-01-01T00:00:00&page=1',
    )
    headers1 = {
        'X-Total-Pages': '1',
        'X-Total-Items': '1',
        'X-Per-Page': '20',
        'X-Current-Page': '1'
    }
    responses.add(
        responses.GET,
        url1,
        status=200,
        json=apiharvester_apiresponse_vs,
        headers=headers1
    )
    url2 = '{url}{static}'.format(
        url=apiharvester_config_vs.get('url'),
        static='/v1/resources.json?start_at=1900-01-01T00:00:00&page=2'
    )
    headers2 = {
        'X-Total-Pages': '1',
        'X-Total-Items': '1',
        'X-Per-Page': '20',
        'X-Current-Page': '2'
    }
    responses.add(
        responses.GET,
        url2,
        status=200,
        headers=headers2
    )

    res = runner.invoke(cli.harvest, ['-n', 'VS', '-v'], obj=script_info)
    assert 0 == res.exit_code
    output = '{line1}\n{line2}\n{line3}\n{line4}\n'.format(
        line1='Harvest api: VS',
        line2='API page: 1 url: {url}'.format(url=url1),
        line3='1: {link}'.format(
            link=apiharvester_apiresponse_vs['resources'][0]['link']
        ),
        line4='API harvest 1 items | got 1 from VS'
    )
    assert res.output == output

def test_handle_network_error(cli, datafiles):
    # allow manifest to be fetched
    responses.add_passthru(
        "https://registry.hub.docker.com/v2/library/alpine/manifests/"
        "sha256%3A4b8ffaaa896d40622ac10dc6662204f429f1c8c5714be62a6493a7895f66409"
    )
    # allow authentication to go through
    responses.add_passthru(
        "https://auth.docker.io/"
        "token?service=registry.docker.io&scope=repository:library/alpine:pull"
    )
    # By not adding a rule for the blob, accessing
    # "https://registry.hub.docker.com/v2/" \
    # "library/alpine/blobs/sha256%3Ab56ae66c29370df48e7377c8f9baa744a3958058a766793f821dadcb144a4647"
    # will throw a `ConnectionError`.

    # attempt to fetch source
    project = os.path.join(datafiles.dirname, datafiles.basename)
    result = cli.run(project=project, args=["source", "fetch", "dockerhub-alpine.bst"])

    # check that error is thrown
    result.assert_task_error(ErrorDomain.SOURCE, None)

    # check that BuildStream still runs normally
    result = cli.run(project=project, args=["show", "dockerhub-alpine.bst"])
    result.assert_success()

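# A minimal illustrative sketch (not part of the suite above) of the failure
# mode that test_handle_network_error's comment relies on: under the responses
# mock, a URL with neither a registered response nor a matching passthru raises
# requests.exceptions.ConnectionError. The URL below is a hypothetical example.
import pytest
import requests
import responses


@responses.activate
def test_unregistered_url_raises_connection_error():
    # no responses.add(...) and no responses.add_passthru(...) for this URL
    with pytest.raises(requests.exceptions.ConnectionError):
        requests.get("https://registry.example.com/v2/unregistered-blob")
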
def test_providing_res_with_url_calls_datapusher_correctly(self, app):
    config["datapusher.url"] = "http://datapusher.ckan.org"
    responses.add(
        responses.POST,
        "http://datapusher.ckan.org/job",
        content_type="application/json",
        body=json.dumps({"job_id": "foo", "job_key": "bar"}),
    )
    responses.add_passthru(config["solr_url"])

    package = model.Package.get("annakarenina")

    tests.call_action_api(
        app,
        "datastore_create",
        apikey=self.sysadmin_user.apikey,
        resource=dict(package_id=package.id, url="demo.ckan.org"),
    )

    assert len(package.resources) == 3, len(package.resources)
    resource = package.resources[2]
    data = json.loads(responses.calls[-1].request.body)
    assert data["metadata"]["resource_id"] == resource.id, data
    assert not data["metadata"].get("ignore_hash"), data
    assert data["result_url"].endswith("/action/datapusher_hook"), data
    assert data["result_url"].startswith("http://"), data

def test_get_objects_bad_auth(services):
    # Mock the obj info request
    responses.add_callback(responses.POST, config['workspace_url'], callback=ws_call)
    # Allow elasticsearch calls
    responses.add_passthru("http://localhost:9200/")
    params = {
        "method": "KBaseSearchEngine.get_objects",
        "version": "1.1",
        "params": [{
            'ids': ['public-doc1'],
            'post_processing': {'ids_only': 1},
        }],
    }
    result = rpc.call(json.dumps(params), {'auth': 'bad_token'})
    res = json.loads(result)
    assert res['version'] == '1.1'
    assert 'error' in res
    error = res['error']
    assert error['code'] == 2000
    assert error['message'] == 'Auth error'
    assert error['name'] == 'APIError'

def test_providing_res_with_url_calls_datapusher_correctly(self):
    config['datapusher.url'] = 'http://datapusher.ckan.org'
    responses.add(
        responses.POST,
        'http://datapusher.ckan.org/job',
        content_type='application/json',
        body=json.dumps({'job_id': 'foo', 'job_key': 'bar'}))
    responses.add_passthru(config['solr_url'])

    package = model.Package.get('annakarenina')

    tests.call_action_api(
        self.app,
        'datastore_create',
        apikey=self.sysadmin_user.apikey,
        resource=dict(package_id=package.id, url='demo.ckan.org')
    )

    assert len(package.resources) == 4, len(package.resources)
    resource = package.resources[3]
    data = json.loads(responses.calls[-1].request.body)
    assert data['metadata']['resource_id'] == resource.id, data
    assert not data['metadata'].get('ignore_hash'), data
    assert data['result_url'].endswith('/action/datapusher_hook'), data
    assert data['result_url'].startswith('http://'), data

def test_download_metadata404(text_dandiset: SampleDandiset, tmp_path: Path) -> None:
    responses.add_passthru(re.compile("^http"))
    asset = text_dandiset.dandiset.get_asset_by_path("subdir1/apple.txt")
    responses.add(responses.GET, asset.api_url, status=404)
    statuses = list(
        download_generator(
            DandisetURL(
                api_url=text_dandiset.client.api_url,
                dandiset_id=text_dandiset.dandiset.identifier,
                version_id=text_dandiset.dandiset.version_id,
            ),
            tmp_path,
        )
    )
    errors = [s for s in statuses if s.get("status") == "error"]
    assert errors == [
        {
            "path": "subdir1/apple.txt",
            "status": "error",
            "message": f"No such asset: {asset}",
        }
    ]
    assert list_paths(tmp_path, dirs=True) == [
        tmp_path / dandiset_metadata_file,
        tmp_path / "file.txt",
        tmp_path / "subdir2",
        tmp_path / "subdir2" / "banana.txt",
        tmp_path / "subdir2" / "coconut.txt",
    ]

def test_b64_exe(self):
    self.setup_scrape_results()
    raw_paste = "TVqQAAMAAAAEAAAA//8AALgAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAA4fug4AtAnNIbgBTM0hVGhpcyBwcm9ncmFtIGNhbm5vdCBiZSBydW4gaW4gRE9TIG1vZGUuDQ0KJAAAAAAAAABQRQAATAEDAP7MnlkAAAAAAAAAAOAAAgELAQgAAJwAAAASAAAAAAAATroAAAAgAAAAAAAAAABAAAAgAAAAAgAABAAAAAAAAAAEAAAAAAAAAAAAAQAAAgAAAAAAAAIAQIUAABAAABAAAAAAEAAAEAAAAAAAABAAAAAAAAAAAAAAAPS5AABXAAAAAMAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAOAAAAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAACAAAAAAAAAAAAAAACCAAAEgAAAAAAAAAAAAAubun"
    responses.add(
        responses.GET,
        "https://scrape.pastebin.com/api_scrape_item.php?i=abcd1234",
        body=raw_paste,
        status=404)
    if self.config['general']['viper']['enabled']:
        responses.add_passthru(self.config['general']['viper']['api_host'])
        # delete our test file if it's already in Viper
        auth_token = self.config["general"]["viper"]["auth_token"]
        header = {"Authorization": "Token " + auth_token}
        r = requests.delete(
            "https://viper.charlesarvey.com/api/v3/project/default/malware/d363de25d4608eb4fca54f920f1e8cb33acb10f74018d0e0baeaac4cee2d0073/",
            headers=header)
        print("Deleting old file...", r.status_code, r.content)
    pastehunter = PasteHunter(testing=True)
    pastehunter.start_scanner()
    stored_doc = self.get_log()
    self.assertIn('b64_exe', stored_doc['YaraRule'])

def setUp(self):
    super(TestSigning, self).setUp()

    # Change addon file name
    self.addon = amo.tests.addon_factory()
    self.addon.update(guid='xxxxx')
    self.version = self.addon.current_version
    self.file_ = self.version.all_files[0]

    # Add actual file to addons
    if not os.path.exists(os.path.dirname(self.file_.file_path)):
        os.makedirs(os.path.dirname(self.file_.file_path))

    fp = zipfile.ZipFile(self.file_.file_path, 'w')
    fp.writestr('install.rdf', (
        '<?xml version="1.0"?><RDF '
        ' xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#" '
        ' xmlns:em="http://www.mozilla.org/2004/em-rdf#">'
        '<Description about="urn:mozilla:install-manifest">'
        ' <em:id>foo@jetpack</em:id>'
        ' <em:type>2</em:type>'
        ' <em:bootstrap>true</em:bootstrap>'
        ' <em:unpack>false</em:unpack>'
        ' <em:version>0.1</em:version>'
        ' <em:name>foo</em:name>'
        ' <em:description>foo bar</em:description>'
        ' <em:optionsType>2</em:optionsType>'
        ' <em:targetApplication></em:targetApplication>'
        '</Description>'
        '</RDF>'))
    fp.close()

    responses.add_passthru(settings.AUTOGRAPH_CONFIG['server_url'])

def payload_schema():
    responses.add_passthru("https://community-tc.services.mozilla.com/")
    r = requests.get(
        "https://community-tc.services.mozilla.com/schemas/docker-worker/v1/payload.json"
    )
    r.raise_for_status()
    return r.json()

def add_profile_mock(context, profile_info):
    """
    Adds an HTTP mock which responds to the HTTP calls for Amazon profile
    information with the given info from profile_info

    Args:
        context(Context): The skill's context
        profile_info(ProfileInfo): The profile info
    """

    def request_callback(request):
        profile_info_type = re.search(r"Profile\.(\w+)", request.path_url).group(1)
        if profile_info is not None:
            info_dict = {
                "name": profile_info.name,
                "givenName": profile_info.given_name,
                "email": profile_info.email,
                "mobileNumber": profile_info.mobile_number
            }
            if profile_info_type in info_dict and info_dict[profile_info_type] is not None:
                return 200, {}, json.dumps(info_dict[profile_info_type])
        return 401, {}, json.dumps({})

    # noinspection PyUnresolvedReferences
    responses.add_passthru('')
    # noinspection PyUnresolvedReferences
    responses.add_callback(
        responses.GET,
        re.compile(r"{}v2/accounts/~current/settings"
                   r"/Profile\.(name|givenName|email|mobileNumber)".format(
                       context.system.api_endpoint)),
        callback=request_callback,
        content_type="application/json")

def test_package_publish_microdata(self):
    context = {'model': model, 'user': self.sysadmin['name']}

    # Patch requests
    url = 'https://microdata.unhcr.org/index.php/api/datasets/create/survey/DATASET'
    responses.add_passthru('http')
    responses.add(responses.POST, url, status=200, json={
        'status': 'success',
        'dataset': {'id': 1},
    })

    # Publish to microdata
    survey = toolkit.get_action('package_publish_microdata')(context, {
        'id': self.dataset['id'],
        'nation': 'nation',
        'repoid': 'repoid',
    })

    # Check calls
    call = responses.calls[0]
    assert len(responses.calls) == 1
    assert call.request.url == url
    assert call.request.headers['X-Api-Key'] == 'API-KEY'
    assert call.request.headers['Content-Type'] == 'application/json'
    assert (json.loads(call.request.body) ==
            helpers.convert_dataset_to_microdata_survey(
                self.dataset, 'nation', 'repoid'))
    assert survey['url'] == 'https://microdata.unhcr.org/index.php/catalog/1'

def mock_easydita():
    for filename in glob.glob(os.path.join(rootdir, "testdata/bundles/*.zip")):
        UUID = os.path.splitext(os.path.basename(filename))[0]
        url = f"https://salesforce.easydita.com/rest/all-files/{UUID}/bundle"
        with open(filename, "rb") as f:
            responses.add(responses.GET, url, body=f.read())
    responses.add_passthru("https://test.salesforce.com")

    def pass_thru(request):
        response = responses._real_send(responses.HTTPAdapter(), request)
        return (response.status_code, response.headers, response.raw.data)

    # Pass through all calls to Salesforce because it is too complex to mock.
    # Can't use the responses.add_passthru feature due to the need to match
    # the varying Salesforce subdomains with a regex.
    responses.add_callback(
        method=responses.GET,
        url=re.compile("https://.*.salesforce.com/.*"),
        callback=pass_thru,
    )
    responses.add_callback(
        method=responses.POST,
        url=re.compile("https://.*.salesforce.com/.*"),
        callback=pass_thru,
    )
    responses.add_callback(
        method=responses.PATCH,
        url=re.compile("https://.*.salesforce.com/.*"),
        callback=pass_thru,
    )
    responses.add_callback(
        method=responses.DELETE,
        url=re.compile("https://.*.salesforce.com/.*"),
        callback=pass_thru,
    )

def test_custom_callback_url_base(self, app):
    package = model.Package.get("annakarenina")
    resource = package.resources[0]
    responses.add(
        responses.POST,
        "http://datapusher.ckan.org/job",
        content_type="application/json",
        body=json.dumps({"job_id": "foo", "job_key": "barloco"}),
    )
    responses.add_passthru(config["solr_url"])

    tests.call_action_api(
        app,
        "datapusher_submit",
        apikey=self.sysadmin_user.apikey,
        resource_id=resource.id,
        ignore_hash=True,
    )

    data = json.loads(responses.calls[-1].request.body)
    assert (data["result_url"] ==
            "https://ckan.example.com/api/3/action/datapusher_hook")

def test_false_positive():
    # Test for false positive
    responses.add_passthru(
        "https://raw.githubusercontent.com/AliasIO/wappalyzer/master/src/technologies.json"
    )

    responses.add(
        responses.GET,
        url="http://perdu.com/",
        body="<html><head><title>Vous Etes Perdu ?</title></head><body><h1>Perdu sur l'Internet ?</h1> \
            <h2>Pas de panique, on va vous aider</h2> \
            <strong><pre> * <----- vous êtes ici</pre></strong></body></html>"
    )

    persister = FakePersister()
    request = Request("http://perdu.com/")
    request.path_id = 1
    persister.requests.append(request)
    crawler = Crawler("http://perdu.com/")
    options = {"timeout": 10, "level": 2}
    logger = Mock()

    module = mod_wapp(crawler, persister, logger, options)
    module.verbose = 2

    for __ in module.attack():
        pass

    assert not persister.additionals

def test_get_objects_valid(services):
    # Mock the obj info request
    responses.add(responses.POST, config['workspace_url'],
                  json=mock_obj_info, status=200)
    # Allow elasticsearch calls
    responses.add_passthru("http://localhost:9200/")
    params = {
        "method": "KBaseSearchEngine.get_objects",
        "version": "1.1",
        "id": 0,
        "params": [{
            'ids': ['public-doc1'],
            'post_processing': {'ids_only': 1},
        }],
    }
    result = rpc.call(json.dumps(params), {'auth': None})
    res = json.loads(result)
    assert res['version'] == '1.1'
    assert res['id'] == 0
    assert 'result' in res
    assert len(res['result']) == 1

def test_retrieval_of_dns_record():
    responses.add_passthru("https://")
    cert = certificate()

    request = Request("Create", cert["CertificateArn"])
    response = handler(request, {})
    assert response["Status"] == "SUCCESS", response["Reason"]
    assert "Name" in response["Data"]
    assert "Type" in response["Data"]
    assert "Value" in response["Data"]
    assert response["Data"]["Type"] == "CNAME"
    assert "PhysicalResourceId" in response
    record_name = response["Data"]["Name"]
    physical_resource_id = response["PhysicalResourceId"]
    assert physical_resource_id == record_name

    request = Request(
        "Create",
        cert["CertificateArn"],
        cert["SubjectAlternativeNames"][1],
    )
    response = handler(request, {})
    assert response["Status"] == "SUCCESS", response["Reason"]
    assert "Name" in response["Data"]
    assert "Type" in response["Data"]
    assert "Value" in response["Data"]
    assert response["Data"]["Type"] == "CNAME"
    assert "PhysicalResourceId" in response

def setUp(self):
    super(TestPackaged, self).setUp()

    # Change addon file name
    self.addon = amo.tests.addon_factory()
    self.addon.update(guid='xxxxx')
    self.version = self.addon.current_version
    self.file_ = self.version.all_files[0]

    # Add actual file to addons
    if not os.path.exists(os.path.dirname(self.file_.file_path)):
        os.makedirs(os.path.dirname(self.file_.file_path))

    fp = zipfile.ZipFile(self.file_.file_path, 'w')
    fp.writestr('install.rdf', (
        '<?xml version="1.0"?><RDF '
        ' xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#" '
        ' xmlns:em="http://www.mozilla.org/2004/em-rdf#">'
        '<Description about="urn:mozilla:install-manifest">'
        ' <em:id>foo@jetpack</em:id>'
        ' <em:type>2</em:type>'
        ' <em:bootstrap>true</em:bootstrap>'
        ' <em:unpack>false</em:unpack>'
        ' <em:version>0.1</em:version>'
        ' <em:name>foo</em:name>'
        ' <em:description>foo bar</em:description>'
        ' <em:optionsType>2</em:optionsType>'
        ' <em:targetApplication></em:targetApplication>'
        '</Description>'
        '</RDF>'))
    fp.close()

    responses.add_passthru(settings.AUTOGRAPH_CONFIG['server_url'])

def setUp(self):
    self.client.force_login(
        User.objects.get_or_create(username='******', is_superuser=True,
                                   is_staff=True)[0])
    cloudlaunch_url = f'{self.live_server_url}/cloudman/cloudlaunch/api/v1'
    patcher1 = patch('clusterman.api.CMServiceContext.cloudlaunch_url',
                     new_callable=PropertyMock,
                     return_value=cloudlaunch_url)
    patcher1.start()
    self.addCleanup(patcher1.stop)

    def create_mock_provider(self, name, config):
        provider_class = self.get_provider_class("mock")
        return provider_class(config)

    patcher2 = patch(
        'cloudbridge.factory.CloudProviderFactory.create_provider',
        new=create_mock_provider)
    patcher2.start()
    self.addCleanup(patcher2.stop)

    patcher3 = patch('cloudlaunch.configurers.SSHBasedConfigurer._check_ssh')
    patcher3.start()
    self.addCleanup(patcher3.stop)

    patcher4 = patch('cloudlaunch.configurers.AnsibleAppConfigurer.configure')
    patcher4.start()
    self.addCleanup(patcher4.stop)

    responses.add_passthru('http://localhost')
    responses.add(responses.POST,
                  'https://127.0.0.1:4430/v3/clusterregistrationtoken',
                  json={'nodeCommand': 'docker run rancher --worker'},
                  status=200)
    responses.add(responses.GET,
                  'https://127.0.0.1:4430/v3/nodes/?clusterId=c-abcd1',
                  json={'data': [{'id': 'c-ph9ck:m-01606aca4649',
                                  'ipAddress': '10.1.1.1',
                                  'externalIpAddress': None}]},
                  status=200)
    responses.add(
        responses.POST,
        'https://127.0.0.1:4430/v3/nodes/c-ph9ck:m-01606aca4649?action=drain',
        json={}, status=200)
    responses.add(responses.DELETE,
                  'https://127.0.0.1:4430/v3/nodes/c-ph9ck:m-01606aca4649',
                  json={}, status=200)
    super().setUp()

def test_invalid_url(self):
    responses.add_passthru(config['solr_url'])
    self.data_dict = set_resource_url('http:invalid_url')
    proxied_url = proxy.get_proxified_resource_url(self.data_dict)
    result = self.app.get(proxied_url, status='*')
    assert result.status_int == 409, result.status
    assert 'Invalid URL' in result.body, result.body

def test_generate_uids(api):
    amount = 13000
    url = '{}/system/id.json'.format(API_URL)
    responses.add_passthru(url)
    uids = api.generate_uids(amount)
    assert isinstance(uids, list)
    assert len(uids) == amount

def test_search_product_error(self):
    url = '{}:{}/api/objects/products'.format(CONST_BASE_URL, CONST_PORT)
    responses.add_passthru(url)
    responses.add(responses.GET, '{}/search/error'.format(url), status=400)
    products = self.gdm.products()
    self.assertRaises(HTTPError, products.search, "error")

def test_invalid_url(self, app):
    responses.add_passthru(config['solr_url'])
    self.data_dict = set_resource_url('http:invalid_url')
    proxied_url = proxy.get_proxified_resource_url(self.data_dict)
    result = app.get(proxied_url)
    assert result.status_code == 409
    assert six.b('Invalid URL') in result.data

def test_set_userfields_error(self):
    base_url = f"{CONST_BASE_URL}:{CONST_PORT}/api"
    responses.add_passthru(f"{base_url}/objects/products")
    product = self.gdm.products().list[0]
    userfields_url = f"{base_url}/userfields/products/1"
    responses.add(responses.PUT, userfields_url, status=400)
    self.assertRaises(HTTPError, product.set_userfields, "auserfield", "value")

def _run_online_test(
    response_log_path: Path,
    test_function: Callable,
    asynchronous: bool = False,
    **kwargs,
) -> None:
    """Runs a test function against a Tamr instance and saves the API responses to a file

    Args:
        response_log_path: Location to save API responses
        test_function: The function to test
        asynchronous: Whether or not to run asynchronously
            (i.e. don't `wait` for operations to finish)
        **kwargs: Keyword arguments for the test function
    """
    LOGGER.info(
        f"Online test running against Tamr instance. "
        f"Creating new file at {response_log_path}. This may take a while ..."
    )
    os.makedirs(response_log_path.parent, exist_ok=True)
    response_log_path.touch()

    # Each time an API call is made, allow it to pass through responses and make a real call
    # Each time a real call is made, log the response in the response file
    responses.add_passthru(re.compile(".*"))
    ip_lookup = {}

    def _send_real_with_log(*args, **kwargs) -> Response:
        """Logs the response from _BASE_SEND_REAL

        Args:
            *args: The positional arguments for _BASE_SEND_REAL
            **kwargs: The keyword arguments for _BASE_SEND_REAL

        Returns:
            The response from the call
        """
        response = _BASE_SEND_REAL(*args, **kwargs)
        # Prevent recursion
        with mock.patch("responses._real_send", new=_BASE_SEND_REAL):
            _log_response(
                log_path=response_log_path,
                response=response,
                ip_dict=ip_lookup,
                asynchronous=asynchronous,
            )
        return response

    with mock.patch("responses._real_send", new=_send_real_with_log):
        test_function(**kwargs)

    # Setting the passthru above permanently changes state for online testing
    # Reset passthru to default
    responses.mock.passthru_prefixes = ()
    responses._default_mock.passthru_prefixes = ()

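# A hedged alternative sketch to the manual passthru reset at the end of
# _run_online_test: registering the passthru on a scoped RequestsMock leaves
# the global mock's state untouched, so no private attributes need resetting
# afterwards. The URL below is a placeholder assumption.
import re

import requests
import responses


def scoped_passthru_sketch():
    with responses.RequestsMock() as rsps:
        # the passthru exists only inside this context manager
        rsps.add_passthru(re.compile(".*"))
        requests.get("https://example.com/")  # real network call
    # on exit, the passthru and any registered mocks are discarded
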
def setUp(self):
    super().setUp()
    self.addon = amo.tests.addon_factory(file_kw={
        'filename': 'webextension.xpi',
        'is_webextension': True,
    })
    self.version = self.addon.current_version
    responses.add_passthru(settings.AUTOGRAPH_CONFIG['server_url'])

def test_passthr(self):
    responses.add_passthru('https://httpbin.org/anything')
    responses.add(responses.GET, 'https://nbaplayerprofile.com/api/1/bar',
                  body='welcome to nba')

    resp = requests.get('https://nbaplayerprofile.com/api/1/bar')
    self.assertEqual(resp.text, 'welcome to nba')

    # this URL matches the passthru prefix, so the real httpbin.org is hit
    resp1 = requests.get('https://httpbin.org/anything')
    self.assertEqual(resp1.status_code, 200)

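# As test_passthr above and the dandi tests elsewhere in this collection show,
# add_passthru accepts either a string prefix or (in newer releases of the
# responses library) a compiled regex. A brief sketch combining both forms;
# the URLs and pattern are illustrative assumptions.
import re

import requests
import responses


@responses.activate
def passthru_forms_sketch():
    responses.add_passthru("https://httpbin.org/")  # prefix match
    responses.add_passthru(re.compile(r"^http://localhost:\d+/"))  # regex match
    # matches the prefix passthru, so this goes out to the real server
    assert requests.get("https://httpbin.org/anything").status_code == 200
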
def setUp(self):
    super().setUp()

    # Change addon file name
    self.addon = amo.tests.addon_factory(file_kw={'filename': 'webextension.xpi'})
    self.addon.update(guid='xxxxx')
    self.version = self.addon.current_version
    self.file_ = self.version.all_files[0]

    responses.add_passthru(settings.AUTOGRAPH_CONFIG['server_url'])

def test_download_no_blobDateModified(text_dandiset: SampleDandiset, tmp_path: Path) -> None:
    # Regression test for #806
    responses.add_passthru(re.compile("^http"))
    dandiset = text_dandiset.dandiset
    asset = dandiset.get_asset_by_path("file.txt")
    metadata = asset.get_raw_metadata()
    del metadata["blobDateModified"]
    responses.add(responses.GET, asset.api_url, json=metadata)
    download(dandiset.api_url, tmp_path)

def test_retrieval_non_existing_certificate():
    responses.add_passthru("https://")
    request = Request(
        "Create",
        "arn:aws:acm:eu-central-1:111111111111:certificate/ffffffff-ffff-ffff-ffff-ffffffffffff",
    )
    response = handler(request, {})
    assert response["Status"] == "FAILED", response["Reason"]
    assert "ResourceNotFoundException" in response["Reason"]

def run():
    responses.add_passthru(httpserver.url)
    responses.add(responses.GET, "{}/one".format(httpserver.url), body="one")
    responses.add(responses.GET, "http://example.com/two", body="two")

    resp = requests.get("http://example.com/two")
    assert_response(resp, "two")
    resp = requests.get("{}/one".format(httpserver.url))
    assert_response(resp, "one")
    resp = requests.get(httpserver.url)
    assert_response(resp, "OK")

def fake_hg_repo(tmpdir):
    tmp_path = tmpdir.strpath
    dest = os.path.join(tmp_path, "repos")
    local = os.path.join(dest, "local")
    remote = os.path.join(dest, "remote")
    for d in [local, remote]:
        os.makedirs(d)
        hglib.init(d)

    os.environ["USER"] = "******"
    hg = hglib.open(local)

    hg.branch(b"central")

    responses.add_passthru("http://localhost:8000")

    yield hg, local, remote

    hg.close()

def test_get_subjurisdictions_counties_web01(self):
    """A jurisdiction with sub-jurisdictions with Web01 in url should return a list of URLs"""
    # Avoid hitting all the summary URLs for each subjurisdiction.
    responses.add(
        method=responses.GET,
        url=re.compile(
            r"^https://results.enr.clarityelections.com/AR/"
            r"(.+/[0-9]+/[0-9]+/Web01/en/summary.html|(.+/)?[0-9]+/[0-9]+/reports/summary.zip)$"
        ),
        status=200,
    )
    # Construct a Jurisdiction for Arkansas 2014 General Election
    url = "https://results.enr.clarityelections.com/AR/53237/149294/Web01/en/summary.html"
    responses.add_passthru(url)
    responses.add_passthru(url.replace("summary.html", "json/electionsettings.json"))
    jurisdiction = Jurisdiction(url=url, level="state")
    subjurisdictions = jurisdiction.get_subjurisdictions()
    # A state like AR has county sub-jurisdictions with results
    expected_subjurisdiction_count = 75
    self.assertEqual(len(subjurisdictions), expected_subjurisdiction_count)

def _register_urls(self):
    responses.add_passthru(settings.AUTOGRAPH_CONFIG['server_url'])

def test_signer(self):
    responses.add_passthru(settings.AUTOGRAPH_CONFIG['server_url'])
    status, signer_result = monitors.signer()
    assert status == ''