def test_upload_error(monkeypatch):
    """Upload a file and create a tileset, fails with 409"""
    # Replace the real boto3 session with a mock so no AWS traffic occurs.
    monkeypatch.setattr(mapbox.services.uploads, 'boto3_session', MockSession)
    # Credentials
    query_body = """ {{"key": "_pending/{username}/key.test", "accessKeyId": "ak.test", "bucket": "tilestream-tilesets-production", "url": "https://tilestream-tilesets-production.s3.amazonaws.com/_pending/{username}/key.test", "secretAccessKey": "sak.test", "sessionToken": "st.test"}}""".format(username=username)
    # Stub the credentials handshake with a successful response ...
    responses.add(
        responses.GET,
        'https://api.mapbox.com/uploads/v1/{0}/credentials?access_token={1}'.format(username, access_token),
        match_querystring=True,
        body=query_body, status=200,
        content_type='application/json')
    # ... and make the upload-creation POST itself fail with 409 Conflict.
    responses.add(
        responses.POST,
        'https://api.mapbox.com/uploads/v1/{0}?access_token={1}'.format(username, access_token),
        match_querystring=True,
        body="", status=409,
        content_type='application/json')
    with open('tests/moors.json', 'r') as src:
        res = mapbox.Uploader(access_token=access_token).upload(src, 'test1')
    # The 409 is surfaced on the response object rather than raised.
    assert res.status_code == 409
def test_update_added(self):
    """Topology.update() creates nodes/links from the fetched NetJSON and
    is idempotent when run twice on the same data."""
    t = Topology.objects.first()
    t.parser = 'netdiff.NetJsonParser'
    t.save()
    responses.add(responses.GET,
                  'http://127.0.0.1:9090',
                  body=self._load('static/netjson-1-link.json'),
                  content_type='application/json')
    Node.objects.all().delete()
    t.update()
    self.assertEqual(Node.objects.count(), 2)
    self.assertEqual(Link.objects.count(), 1)
    node1 = Node.objects.get(addresses__contains='192.168.0.1;')
    node2 = Node.objects.get(addresses__contains='192.168.0.2;')
    self.assertEqual(node1.local_addresses, ['10.0.0.1'])
    self.assertEqual(node1.properties, {'gateway': True})
    link = Link.objects.first()
    self.assertIn(link.source, [node1, node2])
    self.assertIn(link.target, [node1, node2])
    self.assertEqual(link.cost, 1.0)
    self.assertEqual(link.properties, {'pretty': True})
    # ensure repeating the action is idempotent
    t.update()
    self.assertEqual(Node.objects.count(), 2)
    self.assertEqual(Link.objects.count(), 1)
def test_update_removed(self):
    """A link missing from a newer snapshot is marked status='down'
    (not deleted), keeping node/link counts stable."""
    t = Topology.objects.first()
    t.parser = 'netdiff.NetJsonParser'
    t.save()
    responses.add(responses.GET,
                  'http://127.0.0.1:9090',
                  body=self._load('static/netjson-2-links.json'),
                  content_type='application/json')
    Node.objects.all().delete()
    t.update()
    self.assertEqual(Node.objects.count(), 3)
    self.assertEqual(Link.objects.count(), 2)
    # now change
    t.url = t.url.replace('9090', '9091')
    t.save()
    responses.add(responses.GET,
                  'http://127.0.0.1:9091',
                  body=self._load('static/netjson-1-link.json'),
                  content_type='application/json')
    t.update()
    self.assertEqual(Node.objects.count(), 3)
    self.assertEqual(Link.objects.count(), 2)
    self.assertEqual(Link.objects.filter(status='down').count(), 1)
    link = Link.objects.filter(status='down').first()
    # The vanished link is the one touching 192.168.0.3.
    self.assertIn('192.168.0.3', [link.source.netjson_id, link.target.netjson_id])
    self.assertEqual(link.cost, 2.0)
def test_add_feature(self):
    """add_feature posts the new feature and returns the created resource
    with server-generated fields plus the echoed input fields."""
    responses.add(
        method="POST",
        url="http://bipolar.test/api/v1/feature/",
        body='{"boolean_permission": true, "creation": "2014-05-23T08:54:42.077131", "id": 10, "limit_permission": null, "name": "data.business", "permission_level": "qualifier", "permission_type": "boolean", "resource_uri": "/api/v1/feature/10/"}'
    )
    resp = self.client.add_feature(
        name="data.business",
        permission_type='boolean',
        boolean_permission=True,
        limit_permission=None,
        permission_level='qualifier',
    )
    # Server-generated fields must be present in the response.
    for generated_field in ("id", "resource_uri", "creation"):
        self.assertTrue(generated_field in resp.keys())
    # Every field we sent must come back unchanged.
    expected = {
        'name': 'data.business',
        'permission_type': 'boolean',
        'boolean_permission': True,
        'limit_permission': None,
        'permission_level': 'qualifier',
    }
    for field, value in expected.items():
        self.assertEqual((field, resp[field]), (field, value))
def test_update_topology_command(self):
    """update_topology() populates the DB; the management command reports
    'Failed to ...' on stdout instead of raising for invalid JSON."""
    t = Topology.objects.first()
    t.parser = 'netdiff.NetJsonParser'
    t.save()
    responses.add(responses.GET,
                  'http://127.0.0.1:9090',
                  body=self._load('static/netjson-1-link.json'),
                  content_type='application/json')
    Node.objects.all().delete()
    update_topology()
    self.assertEqual(Node.objects.count(), 2)
    self.assertEqual(Link.objects.count(), 1)
    # test exception
    t.url = t.url.replace('9090', '9091')
    t.save()
    Node.objects.all().delete()
    Link.objects.all().delete()
    responses.add(responses.GET,
                  'http://127.0.0.1:9091',
                  body=self._load('static/netjson-invalid.json'),
                  content_type='application/json')
    # capture output
    output = StringIO()
    with redirect_stdout(output):
        call_command('update_topology')
    # NOTE(review): one Node survives the invalid-JSON run — confirm this
    # partial-update behaviour is intended.
    self.assertEqual(Node.objects.count(), 1)
    self.assertEqual(Link.objects.count(), 0)
    self.assertIn('Failed to', output.getvalue())
def test_list(self):
    """GistAPI.list() maps the JSON array into gist objects in order,
    preserving id, description and visibility."""
    fixture = [
        {
            'id': 1,
            'description': 'test-desc-A',
            'public': True,
        },
        {
            'id': 2,
            'description': 'test-desc-\u212C',
            'public': False,
        },
    ]
    responses.add(responses.GET, 'https://api.github.com/gists',
                  body=json.dumps(fixture),
                  status=200,
                  )
    gists = gist.GistAPI(token='foo').list()
    first, second = gists[0], gists[1]
    self.assertEqual(first.id, 1)
    self.assertEqual(first.desc, 'test-desc-A')
    self.assertTrue(first.public)
    self.assertEqual(second.id, 2)
    self.assertEqual(second.desc, 'test-desc-\u212C')
    self.assertFalse(second.public)
def test_content(self):
    """GistAPI.content('1') decodes each file's base64 payload back to the
    original text, keyed by filename."""
    def encode64(text):
        # BUG FIX: base64.b64encode requires bytes on Python 3 and its
        # bytes result is not JSON-serializable; round-trip through UTF-8
        # (same helper shape as the sibling command test).
        return b64encode(text.encode('utf-8')).decode('utf-8')
    responses.add(responses.GET, 'https://api.github.com/gists/1',
                  body=json.dumps({
                      "files": {
                          "file-A.txt": {
                              "filename": "file-A.txt",
                              "content": encode64(u"test-content-A"),
                          },
                          "file-B.txt": {
                              "filename": "file-B.txt",
                              "content": encode64(u"test-content-\u212C"),
                          }
                      },
                      "description": "test-gist",
                      "public": True,
                      "id": 1,
                  }),
                  status=200,
                  )
    content = gist.GistAPI(token='foo').content('1')
    self.assertEqual(len(content), 2)
    self.assertTrue('file-A.txt' in content)
    self.assertTrue('file-B.txt' in content)
    self.assertEqual(content['file-A.txt'], 'test-content-A')
    self.assertEqual(content['file-B.txt'], 'test-content-\u212C')
def test_content(self):
    """The 'content 1' command prints each filename and its decoded body."""
    def b64encode(s):
        # Deliberately shadows base64.b64encode: encodes text via UTF-8 so
        # it works on Python 3, and returns text so json.dumps accepts it.
        return base64.b64encode(s.encode('utf-8')).decode('utf-8')
    responses.add(responses.GET, 'https://api.github.com/gists/1',
                  body=json.dumps({
                      "files": {
                          "file-A.txt": {
                              "filename": "file-A.txt",
                              "content": b64encode("test-content-A"),
                          },
                          "file-B.txt": {
                              "filename": "file-B.txt",
                              "content": b64encode("test-content-\u212C"),
                          }
                      },
                      "description": "test-gist",
                      "public": True,
                      "id": 1,
                  }),
                  status=200,
                  )
    lines = self.command_response('content 1')
    self.assertIn('file-A.txt:', lines)
    self.assertIn('test-content-A', lines)
    self.assertIn('file-B.txt:', lines)
    self.assertIn('test-content-\u212C', lines)
def test_content(self):
    """
    When encrypted content is received, check to make sure that it can
    be properly decrypted.
    """
    def b64encrypt(content):
        # Encrypt with the test fixture's key, then base64 as the API would.
        return b64encode(self.encrypt(content))
    responses.add(responses.GET, 'https://api.github.com/gists/1',
                  body=json.dumps({
                      "files": {
                          "file-A.txt": {
                              "filename": "file-A.txt",
                              "content": b64encrypt(u'test-content-A'),
                          },
                          "file-B.txt": {
                              "filename": "file-B.txt",
                              "content": b64encrypt(u'test-content-\u212C'),
                          },
                      },
                      "description": "test-gist",
                      "public": True,
                      "id": 1,
                  }),
                  status=200,
                  )
    lines = self.command_response('content 1 --decrypt')
    # Decrypted output labels files with a '(decrypted)' suffix.
    self.assertIn(u'file-A.txt (decrypted):', lines)
    self.assertIn(u'test-content-A', lines)
    self.assertIn(u'file-B.txt (decrypted):', lines)
    self.assertIn(u'test-content-\u212C', lines)
def test_signal_oauth_error_authorized(request):
    """The oauth_error signal fires when the access-token exchange returns
    401, and the user is still redirected (302)."""
    responses.add(
        responses.POST,
        "https://example.com/oauth/access_token",
        body="Invalid request token.",
        status=401,
    )
    app, bp = make_app()
    calls = []

    def callback(*args, **kwargs):
        calls.append((args, kwargs))

    oauth_error.connect(callback)
    # Disconnect after the test so the handler doesn't leak between tests.
    request.addfinalizer(lambda: oauth_error.disconnect(callback))
    with app.test_client() as client:
        resp = client.get(
            "/login/test-service/authorized?"
            "oauth_token=faketoken&"
            "oauth_token_secret=fakesecret&"
            "oauth_verifier=fakeverifier",
            base_url="https://a.b.c",
        )
        assert len(calls) == 1
        assert calls[0][0] == (bp,)
        assert (
            calls[0][1]["message"]
            == "Token request failed with code 401, response was 'Invalid request token.'."
        )
        assert resp.status_code == 302
def test_login_url():
    """The login endpoint obtains a signed request token (with the correct
    callback URL) and redirects the browser to the provider's authorize URL."""
    responses.add(
        responses.POST,
        "https://example.com/oauth/request_token",
        body="oauth_token=foobar&oauth_token_secret=bazqux",
    )
    app, _ = make_app()
    client = app.test_client()
    resp = client.get(
        "/login/test-service", base_url="https://a.b.c", follow_redirects=False
    )
    # check that we obtained a request token
    assert len(responses.calls) == 1
    assert "Authorization" in responses.calls[0].request.headers
    auth_header = dict(
        parse_authorization_header(
            responses.calls[0].request.headers["Authorization"].decode("utf-8")
        )
    )
    assert auth_header["oauth_consumer_key"] == "client_key"
    assert "oauth_signature" in auth_header
    assert auth_header["oauth_callback"] == quote_plus(
        "https://a.b.c/login/test-service/authorized"
    )
    # check that we redirected the client
    assert resp.status_code == 302
    assert (
        resp.headers["Location"]
        == "https://example.com/oauth/authorize?oauth_token=foobar"
    )
def test_redirect_fallback():
    """With no explicit redirect target configured, the authorized view
    falls back to redirecting to the application root."""
    responses.add(
        responses.POST,
        "https://example.com/oauth/access_token",
        body="oauth_token=xxx&oauth_token_secret=yyy",
    )
    blueprint = OAuth1ConsumerBlueprint(
        "test-service",
        __name__,
        client_key="client_key",
        client_secret="client_secret",
        base_url="https://example.com",
        request_token_url="https://example.com/oauth/request_token",
        access_token_url="https://example.com/oauth/access_token",
        authorization_url="https://example.com/oauth/authorize",
    )
    app = flask.Flask(__name__)
    app.secret_key = "secret"
    app.register_blueprint(blueprint, url_prefix="/login")
    with app.test_client() as client:
        resp = client.get(
            "/login/test-service/authorized?oauth_token=foobar&oauth_verifier=xyz",
            base_url="https://a.b.c",
        )
        # check that we redirected the client
        assert resp.status_code == 302
        assert resp.headers["Location"] == "https://a.b.c/"
def test_signal_oauth_error_login(request):
    """The oauth_error signal fires when the request-token step fails, and
    the user is redirected to the application root."""
    responses.add(
        responses.POST,
        "https://example.com/oauth/request_token",
        body="oauth_problem=nonce_used",
        status=401,
    )
    app, bp = make_app()
    calls = []

    def callback(*args, **kwargs):
        calls.append((args, kwargs))

    oauth_error.connect(callback)
    # Disconnect after the test so the handler doesn't leak between tests.
    request.addfinalizer(lambda: oauth_error.disconnect(callback))
    with app.test_client() as client:
        resp = client.get("/login/test-service", base_url="https://a.b.c")
        assert len(calls) == 1
        assert calls[0][0] == (bp,)
        assert (
            calls[0][1]["message"]
            == "Token request failed with code 401, response was 'oauth_problem=nonce_used'."
        )
        assert resp.status_code == 302
        location = resp.headers["Location"]
        assert location == "https://a.b.c/"
def test_assignee_search(self):
    """The Jira assignee-search view resolves the numeric project id to its
    key, forwards the query, and maps users into value/label pairs."""
    responses.add(
        responses.GET,
        'https://example.atlassian.net/rest/api/2/project',
        json=[{'key': 'HSP', 'id': '10000'}],
        match_querystring=False
    )

    def responder(request):
        # Assert inside the callback that Jira received the resolved
        # project key and the user's search text.
        query = parse_qs(urlparse(request.url).query)
        assert 'HSP' == query['project'][0]
        assert 'bob' == query['query'][0]
        return (200, {}, SAMPLE_USER_SEARCH_RESPONSE)

    responses.add_callback(
        responses.GET,
        'https://example.atlassian.net/rest/api/2/user/assignable/search',
        callback=responder,
        content_type='json',
        match_querystring=False
    )
    org = self.organization
    self.login_as(self.user)
    path = reverse('sentry-extensions-jira-search', args=[org.slug, self.integration.id])
    resp = self.client.get('%s?project=10000&field=assignee&query=bob' % (path,))
    assert resp.status_code == 200
    assert resp.data == [
        {'value': 'deadbeef123', 'label': 'Bobby - [email protected]'}
    ]
def test_authorized_url():
    """The authorized callback exchanges the verifier for an access token,
    stores it in the Flask session, and redirects to the app root."""
    responses.add(
        responses.POST,
        "https://example.com/oauth/access_token",
        body="oauth_token=xxx&oauth_token_secret=yyy",
    )
    app, _ = make_app()
    with app.test_client() as client:
        resp = client.get(
            "/login/test-service/authorized?oauth_token=foobar&oauth_verifier=xyz",
            base_url="https://a.b.c",
        )
        # check that we redirected the client
        assert resp.status_code == 302
        assert resp.headers["Location"] == "https://a.b.c/"
        # check that we obtained an access token
        assert len(responses.calls) == 1
        assert "Authorization" in responses.calls[0].request.headers
        auth_header = dict(
            parse_authorization_header(
                responses.calls[0].request.headers["Authorization"].decode("utf-8")
            )
        )
        assert auth_header["oauth_consumer_key"] == "client_key"
        assert auth_header["oauth_token"] == "foobar"
        assert auth_header["oauth_verifier"] == "xyz"
        # check that we stored the access token and secret in the session
        # (flask.session is only readable inside the client context)
        assert flask.session["test-service_oauth_token"] == {
            "oauth_token": "xxx",
            "oauth_token_secret": "yyy",
        }
def run():
    """A URL registered with match_querystring is served when the request
    carries the identical query string."""
    url = 'http://example.com?test=1'
    responses.add(responses.GET, url, match_querystring=True, body=b'test')
    assert_response(requests.get(url), 'test')
def run():
    """A request whose path/query differ from the registered URL is
    rejected with a ConnectionError."""
    registered = 'http://example.com/?test=1'
    responses.add(responses.GET, registered, match_querystring=True)
    with pytest.raises(ConnectionError):
        requests.get('http://example.com/foo/?test=2')
def testLimitsViaHeadersWithSleepLimitReached(self): api = twitter.Api( consumer_key='test', consumer_secret='test', access_token_key='test', access_token_secret='test', sleep_on_rate_limit=True) # Add handler for ratelimit check - this forces the codepath which goes through the time.sleep call url = '%s/application/rate_limit_status.json?tweet_mode=compat' % api.base_url responses.add( method=responses.GET, url=url, body='{"resources": {"search": {"/search/tweets": {"limit": 1, "remaining": 0, "reset": 1}}}}', match_querystring=True) # Get initial rate limit data to populate api.rate_limit object url = "https://api.twitter.com/1.1/search/tweets.json?tweet_mode=compat&q=test&count=15&result_type=mixed" responses.add( method=responses.GET, url=url, body='{}', match_querystring=True, adding_headers=HEADERS) resp = api.GetSearch(term='test') self.assertTrue(api.rate_limit) self.assertEqual(resp, [])
def test_droplets(self):
    """get_all_droplets() maps the API payload onto Droplet attributes,
    propagating the manager's token to each droplet."""
    data = self.load_from_file('droplets/all.json')
    url = self.base_url + 'droplets/'
    responses.add(responses.GET, url,
                  body=data,
                  status=200,
                  content_type='application/json')
    droplets = self.manager.get_all_droplets()
    droplet = droplets[0]
    self.assertEqual(droplet.token, self.token)
    self.assertEqual(droplet.id, 3164444)
    self.assertEqual(droplet.name, "example.com")
    self.assertEqual(droplet.memory, 512)
    self.assertEqual(droplet.vcpus, 1)
    self.assertEqual(droplet.disk, 20)
    self.assertEqual(droplet.backups, True)
    self.assertEqual(droplet.ipv6, True)
    self.assertEqual(droplet.private_networking, False)
    self.assertEqual(droplet.region['slug'], "nyc3")
    self.assertEqual(droplet.status, "active")
    self.assertEqual(droplet.image['slug'], "ubuntu-14-04-x64")
    self.assertEqual(droplet.size_slug, '512mb')
    self.assertEqual(droplet.created_at, "2014-11-14T16:29:21Z")
    self.assertEqual(droplet.ip_address, "104.236.32.182")
    self.assertEqual(droplet.ip_v6_address, "2604:A880:0800:0010:0000:0000:02DD:4001")
    self.assertEqual(droplet.kernel['id'], 2233)
    self.assertEqual(droplet.backup_ids, [7938002])
    self.assertEqual(droplet.features, ["backups", "ipv6", "virtio"])
def testLimitsViaHeadersNoSleep(self): api = twitter.Api( consumer_key='test', consumer_secret='test', access_token_key='test', access_token_secret='test', sleep_on_rate_limit=False) # Get initial rate limit data to populate api.rate_limit object with open('testdata/ratelimit.json') as f: resp_data = f.read() url = '%s/application/rate_limit_status.json' % self.api.base_url responses.add(responses.GET, url, body=resp_data, match_querystring=True) # Add a test URL just to have headers present responses.add( method=responses.GET, url=DEFAULT_URL, body='{}', adding_headers=HEADERS) resp = api.GetSearch(term='test') self.assertTrue(api.rate_limit.__dict__) self.assertEqual(api.rate_limit.get_limit('/search/tweets').limit, 63) self.assertEqual(api.rate_limit.get_limit('/search/tweets').remaining, 63) self.assertEqual(api.rate_limit.get_limit('/search/tweets').reset, 626672700) # No other resource families should be set during above operation. test_url = '/lists/subscribers/show.json' self.assertEqual( api.rate_limit.__dict__.get('resources').get(test_url), None ) # But if we check them, it should go to default 15/15 self.assertEqual(api.rate_limit.get_limit(test_url).remaining, 15) self.assertEqual(api.rate_limit.get_limit(test_url).limit, 15)
def testLimitsViaHeadersWithSleep(self): api = twitter.Api( consumer_key='test', consumer_secret='test', access_token_key='test', access_token_secret='test', sleep_on_rate_limit=True) # Add handler for ratelimit check url = '%s/application/rate_limit_status.json?tweet_mode=compat' % api.base_url responses.add( method=responses.GET, url=url, body='{}', match_querystring=True) # Get initial rate limit data to populate api.rate_limit object url = "https://api.twitter.com/1.1/search/tweets.json?tweet_mode=compat&q=test&count=15&result_type=mixed" responses.add( method=responses.GET, url=url, body='{}', match_querystring=True, adding_headers=HEADERS) resp = api.GetSearch(term='test') self.assertTrue(api.rate_limit) self.assertEqual(api.rate_limit.get_limit('/search/tweets').limit, 63) self.assertEqual(api.rate_limit.get_limit('/search/tweets').remaining, 63) self.assertEqual(api.rate_limit.get_limit('/search/tweets').reset, 626672700)
def mock_file_apis(job_command, job_func, job_func_arg):
    """Register a 201 POST stub for the given session sub-command, then
    invoke *job_func* with *job_func_arg* and return its future."""
    endpoint = "{0}/sessions/{1}/{2}".format(base_uri, session_id, job_command)
    responses.add(responses.POST, endpoint, status=201, body='',
                  content_type='application/json')
    return job_func(job_func_arg)
def assert_url_correct(self, method, url_, request, *args, **kwargs):
    """Issue *request* against a mocked endpoint, then verify the requested
    URL matches *url_* and every query param (except 'token') matches the
    corresponding keyword argument."""
    # Mock the request.
    responses.add(method, url_)
    # Make the request. ApiErrors are ok — only the URL is under test.
    try:
        request(*args, **kwargs)
    except errors.ApiError:
        pass
    # Split the params from the URL.
    parts = responses.calls[0].request.url.split('?', 1)
    if len(parts) == 1:
        requested_url, param_str = parts[0], None
    else:
        requested_url, param_str = parts
    # Check the URL.
    self.assertEqual(requested_url, url_)
    # Check the params. Allow for the API token to be found.
    if param_str is None:
        return
    pairs = (item.split('=') for item in param_str.split('&'))
    for key, value in (pair for pair in pairs if pair[0] != 'token'):
        self.assertIn(key, kwargs)
        self.assertEqual(str(kwargs[key]), urllib.parse.unquote(value))
def test_ReleaseNews(mock_send_msg, versions_json, versions_json_content, heise_html):
    """ReleaseNews loads known versions from a populated file, starts empty
    for a fresh file, and records the checked version after checker()."""
    url = 'http://www.heise.de/download/firefox.html'
    responses.add(responses.GET, url, body=heise_html, status=200)
    release_news = ReleaseNews(versions_file=versions_json_content)
    assert release_news.version_dict == {
        u'jre': u'Java Runtime Environment (JRE) 8u25',
        u'firefox': u'Firefox Setup 34.0.5.exe',
        u'acrobat_reader': u'11.0.10'}
    # A fresh versions file yields an empty dict.
    release_news = ReleaseNews(versions_file=versions_json)
    assert release_news.version_dict == {}

    @release_news.check_this
    def firefox():
        url = 'http://www.heise.de/download/firefox.html'
        return return_check_this(get_version_from_heise(url), url)

    # NOTE(review): the addresses below are redacted fixture strings.
    release_news.checker('*****@*****.**', 'test', '*****@*****.**')
    assert release_news.version_dict == {'firefox': u'Firefox 34.0.5'}
def test_insecure_flag(self):
    """Both `-s` and `-ks` (insecure) invocations exit cleanly (code 0)."""
    # Given
    responses.add(responses.GET, "https://acme.com/accounts/user/info/",
                  body=json.dumps(R_JSON_AUTH_RESP))
    config = Configuration()
    config.update(store_url="https://acme.com")
    config.update(auth=("nono", "le gros robot"))
    # When
    with self.assertRaises(SystemExit) as e:
        with mock.patch("enstaller.main._ensure_config_or_die",
                        return_value=config):
            with mock.patch(
                "enstaller.main.ensure_authenticated_config"
            ):
                main_noexc(["-s", "fubar"])
    # Then
    self.assertEqual(e.exception.code, 0)
    # When
    with self.assertRaises(SystemExit) as e:
        with mock.patch("enstaller.main._ensure_config_or_die",
                        return_value=config):
            with mock.patch(
                "enstaller.main.ensure_authenticated_config"
            ):
                main_noexc(["-ks", "fubar"])
    # Then
    self.assertEqual(e.exception.code, 0)
def test_after_link_issue(self):
    """after_link_issue posts the comment as {'content': {'raw': ...}} to
    the Bitbucket issue-comments endpoint."""
    issue_id = 3
    repo = 'myaccount/myrepo'
    comment = {'comment': 'hello I\'m a comment'}
    responses.add(
        responses.POST,
        u'https://api.bitbucket.org/2.0/repositories/{repo}/issues/{issue_id}/comments'.format(
            repo=repo, issue_id=issue_id),
        status=201,
        json={'content': {'raw': comment}},
    )
    external_issue = ExternalIssue.objects.create(
        organization_id=self.organization.id,
        integration_id=self.integration.id,
        key='%s#%d' % (repo, issue_id),
    )
    self.integration.get_installation(
        external_issue.organization_id).after_link_issue(
        external_issue, data=comment)
    # Verify the outbound request body rather than the mocked response.
    request = responses.calls[0].request
    assert responses.calls[0].response.status_code == 201
    payload = json.loads(request.body)
    assert payload == {'content': {'raw': comment['comment']}}
def test_text_playlist_with_mpeg_stream(
        self, scanner, provider, caplog):
    """A text playlist is fetched over HTTP and unwrapped to the playable
    audio stream it references."""
    scanner.scan.side_effect = [
        # Scanning playlist
        mock.Mock(mime='text/foo', playable=False),
        # Scanning stream
        mock.Mock(mime='audio/mpeg', playable=True),
    ]
    responses.add(
        responses.GET, PLAYLIST_URI,
        body=BODY, content_type='audio/x-mpegurl')

    result = provider.translate_uri(PLAYLIST_URI)

    assert scanner.scan.mock_calls == [
        mock.call(PLAYLIST_URI, timeout=mock.ANY),
        mock.call(STREAM_URI, timeout=mock.ANY),
    ]
    assert result == STREAM_URI

    # Check logging to ensure debuggability.
    # BUG FIX: the first and third assertions previously asserted a bare
    # non-empty string (always true); they now actually check the log.
    assert 'Unwrapping stream from URI: %s' % PLAYLIST_URI in caplog.text()
    assert 'Parsed playlist (%s)' % PLAYLIST_URI in caplog.text()
    assert 'Unwrapping stream from URI: %s' % STREAM_URI in caplog.text()
    assert (
        'Unwrapped potential audio/mpeg stream: %s' % STREAM_URI
        in caplog.text())

    # Check proper Requests session setup
    assert responses.calls[0].request.headers['User-Agent'].startswith(
        'Mopidy-Stream/')
def mock_submit_job_and_poll_result(
    job, job_state, result=None, error=None
):
    """Stub the submit-job POST (always 'SENT') and the job-status GET
    (returning *job_state*/*result*/*error*), then submit *job* and return
    the resulting future."""
    submit_request_mock_uri = base_uri + "/sessions/" + str(session_id) \
        + "/submit-job"
    poll_request_mock_uri = base_uri + "/sessions/" + str(session_id) \
        + "/jobs/" + str(job_id)
    post_json_data = {
        u'state': u'SENT',
        u'error': None,
        u'id': job_id,
        u'result': None
    }
    responses.add(responses.POST, submit_request_mock_uri, status=201,
                  json=post_json_data, content_type='application/json')
    get_json_data = {
        u'state': job_state,
        u'error': error,
        u'id': job_id,
        u'result': result
    }
    responses.add(responses.GET, poll_request_mock_uri, status=200,
                  json=get_json_data, content_type='application/json')
    submit_job_future = client_test.submit(job)
    return submit_job_future
def test_upload():
    """Uploading with the 'pypi' repo settings performs exactly one POST to
    the PyPI endpoint."""
    responses.add(responses.POST, upload.PYPI, status=200)
    with patch('flit.upload.get_repository', return_value=repo_settings):
        wheel.wheel_main(samples_dir / 'module1-pkg.ini', upload='pypi')
    recorded = responses.calls
    assert len(recorded) == 1
def test_get_anime(self):
    """
    Tests the flask 'Controller' get_anime where it gets an anime and
    decides whether to fetch or update its subgroups from anidb
    """
    # NOTE(review): `subs` is built but never attached to the anime or the
    # session — confirm whether it should be persisted.
    subs = [Sub_Group(gid="1234", name="poo subs", state="done", lastep="8",
                      eprange="1-3", audio="japanese", subs="english")]
    expected = Anime(aid="1234", eng_title="english", jpn_title="japan",
                     eng_jpn="jpneng")
    # set fake response from anidb client
    responses.add(responses.GET,
                  url='http://anidb.net/perl-bin/animedb.pl?show=anime&aid='
                      + str(expected.aid) + "&showallag=1#grouplist",
                  body=Data.anime_page, match_querystring=True, status=200)
    # handle db setup bad practice to commit
    self.db.session.add(expected)
    # dao = DAO(self.db.session)
    # dao.get_anime(aid="1234")
    self.db.session.commit()
    self.assertNotEqual(expected.id, None)
    # BUG FIX: Python 2 `print` statements rewritten as single-argument
    # print() calls, which parse and behave identically on Python 2 and 3.
    print("anime pk: " + str(expected.id))
    response = server.get_anime(expected.id)
    print(str(response))
    self.assertEqual(response[1], 200)
    # bug subgroup foreign key is a string needs to be integer
    self.db.session.close()
def test_create_issue_default_project_not_in_api_call(self):
    """The configured default project is prepended to the project choices
    (fetched separately by id) even though the group-projects API listing
    does not include it."""
    group_description = (
        u'Sentry Issue: [%s](%s)\n\n'
        '```\nStacktrace (most recent call last):\n\n'
        '  File "sentry/models/foo.py", line 29, in build_msg\n'
        '    string_max_length=self.string_max_length)\n\nmessage\n```'
    ) % (
        self.group.qualified_short_id,
        absolute_uri(
            self.group.get_absolute_url(
                params={'referrer': 'gitlab_integration'})),
    )
    project_id = 10
    project_name = 'This_is / a_project'
    # Store a per-project issue default pointing at project 10.
    org_integration = self.installation.org_integration
    org_integration.config['project_issue_defaults'] = {
        six.text_type(self.group.project_id): {
            'project': project_id
        }
    }
    org_integration.save()
    responses.add(responses.GET,
                  u'https://example.gitlab.com/api/v4/groups/%s/projects'
                  % self.installation.model.metadata['group_id'],
                  json=[
                      {
                          'name_with_namespace': 'getsentry / sentry',
                          'id': 1
                      },
                      {
                          'name_with_namespace': 'getsentry / hello',
                          'id': 22
                      },
                  ])
    # The default project is resolved via a direct by-id lookup.
    responses.add(responses.GET,
                  u'https://example.gitlab.com/api/v4/projects/%s' % project_id,
                  json={
                      'name_with_namespace': project_name,
                      'id': project_id
                  })
    assert self.installation.get_create_issue_config(self.group) == [{
        'url': '/extensions/gitlab/search/baz/%d/' % self.installation.model.id,
        'name': 'project',
        'required': True,
        'choices': [(10, u'This_is / a_project'),
                    (1, u'getsentry / sentry'),
                    (22, u'getsentry / hello')],
        'defaultValue': project_id,
        'type': 'select',
        'label': 'Gitlab Project'
    }, {
        'name': 'title',
        'label': 'Title',
        'default': self.group.get_latest_event().error(),
        'type': 'string',
        'required': True,
    }, {
        'name': 'description',
        'label': 'Description',
        'default': group_description,
        'type': 'textarea',
        'autosize': True,
        'maxRows': 10,
    }]
def test_get_asks(self):
    """Market.asks() returns the 'asks' list from the market endpoint."""
    payload = {'asks': [{'test': 'true'}]}
    responses.add(responses.GET, 'http://localhost:8085/market/asks',
                  json=payload)
    self.assertEqual(Market.asks(), payload['asks'])
def test_market_running_false(self):
    """is_market_running() is False when the bids endpoint errors."""
    statuses = {'asks': 200, 'bids': 400}
    for endpoint, code in statuses.items():
        responses.add(responses.HEAD,
                      'http://localhost:8085/market/%s' % endpoint,
                      status=code)
    self.assertFalse(Market.is_market_running())
def test_run_anonymous_apex_success(self):
    """The task completes when the server reports compiled and successful."""
    task, url = self._get_url_and_task()
    responses.add(responses.GET, url, status=200,
                  json={"compiled": True, "success": True})
    task()
def _mock_run_tests(self, success=True, body="JOB_ID1234567"):
    """Stub runTestsAsynchronous: a JSON job id on success, 503 otherwise."""
    url = self.base_tooling_url + "runTestsAsynchronous"
    if not success:
        responses.add(responses.POST, url,
                      status=http.client.SERVICE_UNAVAILABLE)
    else:
        responses.add(responses.POST, url, json=body)
def test_run_batch_apex_calc_elapsed_time(self):
    """elapsed_time over the mocked subjobs evaluates to 61."""
    task, url = self._get_url_and_task()
    responses.add(responses.GET, url, json=self._get_query_resp())
    task()
    self.assertEqual(task.elapsed_time(task.subjobs), 61)
def test_run_batch_apex_status_ok(self):
    """The batch-apex task runs to completion against the mocked query."""
    task, url = self._get_url_and_task()
    responses.add(responses.GET, url, json=self._get_query_resp())
    task()
def mock_response(self, url, body):
    """Register *url* to return *body* as HTML, matching the query string."""
    responses.add(responses.GET, url,
                  match_querystring=True,
                  content_type='text/html',
                  body=body)
def test_target_release_exists(self, commit_dir, extract_github):
    """PublishSubtree raises GithubException when the release tag already
    exists in the target (public) repository."""
    responses.add(
        method=responses.GET,
        url=self.repo_api_url,
        json=self._get_expected_repo(owner=self.repo_owner, name=self.repo_name),
    )
    responses.add(
        responses.GET,
        self.repo_api_url + "/releases/latest",
        json=self._get_expected_release("release/1.0"),
    )
    responses.add(
        method=responses.GET,
        url=self.public_repo_url,
        json=self._get_expected_repo(
            owner=self.public_owner, name=self.public_name
        ),
    )
    # The tag ref already exists in the target repo — this is the conflict.
    responses.add(
        method=responses.GET,
        url=self.public_repo_url + "/git/refs/tags/release/1.0",
        status=201,
        json=self._get_expected_tag_ref("release/1.0", "SHA"),
    )
    responses.add(
        method=responses.GET,
        url=self.repo_api_url + "/git/refs/tags/release/1.0",
        status=201,
        json=self._get_expected_tag_ref("release/1.0", "SHA"),
    )
    responses.add(
        method=responses.GET,
        url=self.repo_api_url + "/git/tags/SHA",
        json=self._get_expected_tag("release/1.0", "SHA"),
        status=201,
    )
    responses.add(
        responses.GET,
        self.repo_api_url + "/releases/tags/release/1.0",
        json=self._get_expected_release("release/1.0"),
    )
    responses.add(
        responses.GET,
        self.public_repo_url + "/releases/tags/release/1.0",
        json=self._get_expected_release("release/1.0"),
    )
    task_config = TaskConfig(
        {
            "options": {
                "branch": "master",
                "version": "latest",
                "repo_url": self.public_repo_url,
                "includes": ["tasks/foo.py", "unpackaged/pre/foo/package.xml"],
            }
        }
    )
    extract_github.return_value.namelist.return_value = [
        "tasks/foo.py",
        "unpackaged/pre/foo/package.xml",
        "force-app",
    ]
    task = PublishSubtree(self.project_config, task_config)
    with pytest.raises(GithubException) as exc:
        task()
    assert "Ref for tag release/1.0 already exists in target repo" == str(exc.value)
def mock_response(self, url, json):
    # NOTE(review): the `json` parameter shadows the json module; it is the
    # raw JSON string response body, passed through as `body=` (not the
    # `responses` json= keyword). Renaming would break keyword callers.
    responses.add(responses.GET, url, body=json,
                  content_type='application/json')
def test_08_auth_fail(self):
    """A 401 from the gateway makes submit_message raise SMSError."""
    responses.add(responses.GET, self.basic_url, status=401)
    with self.assertRaises(SMSError):
        self.missing_provider.submit_message("123456", "Hello")
def test_non_2xx_response_error(self):
    """An HTTP 400 response surfaces as ReceivedNon2xxResponseError."""
    responses.add(responses.POST, "http://foo", status=400)
    client = HTTPClient("http://foo")
    with pytest.raises(ReceivedNon2xxResponseError):
        client.request("foo")
def test_07_missing_fail(self):
    """Submitting through the misconfigured provider raises SMSError."""
    responses.add(responses.GET, self.missing_url)
    with self.assertRaises(SMSError):
        self.missing_provider.submit_message("123456", "Hello")
def test_11_send_nonascii_sms_post_success(self):
    """Non-ASCII message bodies are delivered successfully over POST."""
    responses.add(responses.POST, self.post_url, body=self.success_body)
    delivered = self.post_provider.submit_message("123456", u"Hallöle Smørrebrød")
    self.assertTrue(delivered)
def test_04_send_sms_get_fail(self):
    """A failure body from the GET gateway makes submit_message raise."""
    responses.add(responses.GET, self.get_url, body=self.fail_body)
    with self.assertRaises(SMSError):
        self.get_provider.submit_message("123456", "Hello")
def test_08_auth_success(self):
    """Submission through the authenticated provider reports success."""
    responses.add(responses.GET, self.basic_url)
    delivered = self.auth_provider.submit_message("123456", "Hello")
    self.assertTrue(delivered)
def test_02_fail(self):
    """An HTTP 402 response makes submit_message raise SMSError."""
    responses.add(responses.POST, self.url, status=402)
    with self.assertRaises(SMSError):
        self.provider.submit_message("123456", "Hello")
def test_05_simple_service_success(self):
    """The simple provider reports success on a plain 200 response."""
    responses.add(responses.GET, self.simple_url, status=200)
    delivered = self.simple_provider.submit_message("123456", "Hello")
    self.assertTrue(delivered)
def test_correct_message_printed_if_server_error(self):
    """Correct console message printed if server error returned."""
    responses.add(responses.GET, URL, status=500)
    with patch("sys.stdout", new=StringIO()) as captured:
        cost_abroad.create.prices_raw("A010101")
    self.assertIn("500 outside", captured.getvalue())
def test_03_send_sms_get_success(self):
    """A success body from the GET gateway reports delivery."""
    responses.add(responses.GET, self.get_url, body=self.success_body)
    delivered = self.get_provider.submit_message("123456", "Hello")
    self.assertTrue(delivered)
def test_get_all(self):
    """cadasta_show_parcel proxies the parcels endpoint and returns the
    GeoJSON FeatureCollection unchanged."""
    body = '''
    {
        "type": "FeatureCollection",
        "features": [
            {
                "type": "Feature",
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [
                        [
                            [ -105.228338241577, 21.1714137482368 ],
                            [ -105.229024887085, 21.1694127979643 ],
                            [ -105.228338241577, 21.1714137482368 ]
                        ]
                    ]
                },
                "properties": {
                    "id": 1,
                    "spatial_source": 4,
                    "user_id": "1",
                    "area": null,
                    "land_use": null,
                    "gov_pin": null,
                    "active": true,
                    "time_created": "2015-08-06T15:41:26.440037-07:00",
                    "time_updated": null,
                    "created_by": 1,
                    "updated_by": null
                }
            }]
    }
    '''
    responses.add(responses.GET, 'http://cadasta.api/parcels',
                  body=body,
                  content_type="application/json")
    result = helpers.call_action(
        'cadasta_show_parcel',
    )
    assert_equal({
        u'features': [{
            u'geometry': {
                u'coordinates': [[[-105.228338241577, 21.1714137482368],
                                  [-105.229024887085, 21.1694127979643],
                                  [-105.228338241577, 21.1714137482368]]],
                u'type': u'Polygon'},
            u'properties': {
                u'active': True,
                u'area': None,
                u'created_by': 1,
                u'gov_pin': None,
                u'id': 1,
                u'land_use': None,
                u'spatial_source': 4,
                u'time_created': u'2015-08-06T15:41:26.440037-07:00',
                u'time_updated': None,
                u'updated_by': None,
                u'user_id': u'1'},
            u'type': u'Feature'
        }],
        u'type': u'FeatureCollection'}, result)
def test_01_success(self):
    """A plain 200 POST response reports successful delivery."""
    responses.add(responses.POST, self.url)
    delivered = self.provider.submit_message("123456", "Hello")
    self.assertTrue(delivered)
def test_get_one(self):
    # Verifies that 'cadasta_show_parcel_detail' returns the parcel
    # detail FeatureCollection -- including nested parcel_history and
    # relationships -- parsed into Python structures, unchanged.
    # Raw GeoJSON payload served by the mocked parcel-details endpoint.
    body = ''' { "type": "FeatureCollection", "features": [ { "type": "Feature", "geometry": { "type": "Point", "coordinates": [ -73.724739, 40.588342 ] }, "properties": { "id": 1, "spatial_source": 1, "user_id": "11", "area": null, "land_use": null, "gov_pin": null, "active": true, "sys_delete": false, "time_created": "2015-09-01T09:53:16.466337-07:00", "time_updated": null, "created_by": 11, "updated_by": null, "parcel_history": [ { "id": 1, "parcel_id": 1, "origin_id": 1, "parent_id": null, "version": 1, "description": "new description", "date_modified": "2015-09-01T07:00:00.000Z", "active": true, "time_created": "2015-09-01T16:53:16.466Z", "time_updated": null, "created_by": 11, "updated_by": null } ], "relationships": [ { "id": 1, "parcel_id": 1, "party_id": 1, "geom_id": null, "tenure_type": 1, "acquired_date": null, "how_acquired": null, "active": true, "sys_delete": false, "time_created": "2015-09-01T16:53:16.466Z", "time_updated": null, "created_by": 11, "updated_by": null } ] } } ] } '''
    responses.add(responses.GET,
                  'http://cadasta.api/parcels/1/details',
                  body=body,
                  content_type="application/json")
    result = helpers.call_action(
        'cadasta_show_parcel_detail',
        id=1,
        project_id=1,
    )
    # Expected value mirrors the mocked payload after JSON decoding
    # (null -> None, true/false -> True/False, strings -> unicode).
    assert_equal({
        u'features': [{
            u'geometry': {
                u'coordinates': [-73.724739, 40.588342],
                u'type': u'Point'
            },
            u'properties': {
                u'active': True,
                u'area': None,
                u'created_by': 11,
                u'gov_pin': None,
                u'id': 1,
                u'land_use': None,
                u'parcel_history': [{
                    u'active': True,
                    u'created_by': 11,
                    u'date_modified': u'2015-09-01T07:00:00.000Z',
                    u'description': u'new description',
                    u'id': 1,
                    u'origin_id': 1,
                    u'parcel_id': 1,
                    u'parent_id': None,
                    u'time_created': u'2015-09-01T16:53:16.466Z',
                    u'time_updated': None,
                    u'updated_by': None,
                    u'version': 1}
                ],
                u'relationships': [{
                    u'acquired_date': None,
                    u'active': True,
                    u'created_by': 11,
                    u'geom_id': None,
                    u'how_acquired': None,
                    u'id': 1,
                    u'parcel_id': 1,
                    u'party_id': 1,
                    u'sys_delete': False,
                    u'tenure_type': 1,
                    u'time_created': u'2015-09-01T16:53:16.466Z',
                    u'time_updated': None,
                    u'updated_by': None}],
                u'spatial_source': 1,
                u'sys_delete': False,
                u'time_created': u'2015-09-01T09:53:16.466337-07:00',
                u'time_updated': None,
                u'updated_by': None,
                u'user_id': u'11'},
            u'type': u'Feature'}],
        u'type': u'FeatureCollection'},
        result
    )
def test_json_returned_if_valid_code_provided(self):
    """prices_raw returns the decoded JSON for a valid category code."""
    responses.add(responses.GET, URL, body=r'{"value": {"0": 77}}')
    fetched = cost_abroad.create.prices_raw("A010201")
    self.assertEqual(fetched, {"value": {"0": 77}})
def test_update_org_config_vars_exist(self):
    """Test the case wherein the secrets and env vars already exist"""
    with self.tasks():
        self.assert_setup_flow()
    uuid = self.mock_uuid4.hex
    # Names of the Vercel secrets the integration creates, suffixed
    # with the (mocked, deterministic) uuid.
    secret_names = [
        "sentry_org_%s" % uuid,
        "sentry_project_%s" % uuid,
        "sentry_dsn_%s" % uuid,
        "sentry_auth_token_%s" % uuid,
    ]
    # Environment variables expected on the Vercel project.
    env_var_names = [
        "SENTRY_ORG",
        "SENTRY_PROJECT",
        "SENTRY_DSN",
        "SENTRY_AUTH_TOKEN",
    ]
    # Project metadata lookup: a github-linked Gatsby project.
    responses.add(
        responses.GET,
        "https://api.vercel.com/v1/projects/%s"
        % "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H",
        json={
            "link": {
                "type": "github"
            },
            "framework": "gatsby"
        },
    )
    # Secret creation succeeds for every config value.
    for i, name in enumerate(secret_names):
        responses.add(
            responses.POST,
            "https://api.vercel.com/v2/now/secrets",
            json={"uid": "sec_%s" % i},
        )
    # Each env var already exists on the project...
    for i, env_var_name in enumerate(env_var_names):
        responses.add(
            responses.GET,
            "https://api.vercel.com/v5/projects/%s/env"
            % "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H",
            json={
                "envs": [{
                    "value": "sec_%s" % i,
                    "target": "production",
                    "key": env_var_name
                }],
            },
        )
    # ...so the integration must delete and re-create each one.
    for i, env_var_name in enumerate(env_var_names):
        responses.add(
            responses.DELETE,
            "https://api.vercel.com/v4/projects/%s/env/%s?target=%s"
            % ("Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H",
               env_var_name, "production"),
        )
        responses.add(
            responses.POST,
            "https://api.vercel.com/v4/projects/%s/env"
            % "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H",
            json={
                "value": "sec_%s" % i,
                "target": "production",
                "key": env_var_name
            },
        )
    # VERCEL_GITHUB_COMMIT_SHA is handled outside the loop; note its
    # DELETE uses an empty target string rather than "production".
    responses.add(
        responses.DELETE,
        "https://api.vercel.com/v4/projects/%s/env/%s?target=%s"
        % ("Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H",
           "VERCEL_GITHUB_COMMIT_SHA", ""),
    )
    responses.add(
        responses.POST,
        "https://api.vercel.com/v4/projects/%s/env"
        % "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H",
        json={
            "value": "",
            "target": "production",
            "key": "VERCEL_GITHUB_COMMIT_SHA"
        },
    )
    org = self.organization
    project_id = self.project.id
    # Map the Sentry project onto the Vercel project id.
    data = {
        "project_mappings": [[project_id,
                              "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H"]]
    }
    integration = Integration.objects.get(provider=self.provider.key)
    installation = integration.get_installation(org.id)
    org_integration = OrganizationIntegration.objects.get(
        organization_id=org.id, integration_id=integration.id)
    # Config starts empty before the update is applied.
    assert org_integration.config == {}
    # Run the config update with the deterministic uuid patched in so
    # the generated secret names match secret_names above.
    with patch("sentry.integrations.vercel.integration.uuid4",
               new=self.mock_uuid4):
        installation.update_organization_config(data)
    org_integration = OrganizationIntegration.objects.get(
        organization_id=org.id, integration_id=integration.id)
    assert org_integration.config == {
        "project_mappings": [[project_id,
                              "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H"]]
    }
    # Spot-check the env re-creation request bodies.  The hard-coded
    # call indices depend on the exact sequence of mocked HTTP calls
    # made by assert_setup_flow plus update_organization_config --
    # NOTE(review): they will shift if either flow changes.
    req_params = json.loads(responses.calls[13].request.body)
    assert req_params["key"] == "SENTRY_ORG"
    assert req_params["value"] == "sec_0"
    assert req_params["target"] == "production"
    req_params = json.loads(responses.calls[16].request.body)
    assert req_params["key"] == "SENTRY_PROJECT"
    assert req_params["value"] == "sec_1"
    assert req_params["target"] == "production"
    req_params = json.loads(responses.calls[19].request.body)
    assert req_params["key"] == "SENTRY_DSN"
    assert req_params["value"] == "sec_2"
    assert req_params["target"] == "production"
    req_params = json.loads(responses.calls[22].request.body)
    assert req_params["key"] == "SENTRY_AUTH_TOKEN"
    assert req_params["value"] == "sec_3"
    assert req_params["target"] == "production"
    req_params = json.loads(responses.calls[24].request.body)
    assert req_params["key"] == "VERCEL_GITHUB_COMMIT_SHA"
    assert req_params["value"] == ""
    assert req_params["target"] == "production"
def test_get_one(self):
    # Verifies that 'cadasta_show_parcel_relationship_history' returns
    # the upstream history FeatureCollection parsed into Python
    # structures, unchanged in content.
    # Raw GeoJSON served by the mocked relationship-history endpoint.
    body = ''' { "type": "FeatureCollection", "features": [ { "type": "Feature", "geometry": null, "properties": { "relationship_id": 1, "origin_id": 1, "version": 1, "parent_id": null, "parcel_id": 1, "expiration_date": null, "description": "History", "date_modified": "2015-09-02", "active": true, "time_created": "2015-09-02T18:09:15.057843+00:00", "time_updated": "2015-09-02T18:09:15.057843+00:00", "created_by": 11, "updated_by": null, "relationship_type": "own", "spatial_source": "survey_sketch", "party_id": 1, "first_name": "Thurmond", "last_name": "Thomas" } } ] } '''
    responses.add(responses.GET,
                  'http://cadasta.api/parcels/1/show_relationship_history',
                  body=body,
                  content_type="application/json")
    # Extra query arguments (fields/sort/limit) are passed through to
    # the action; the mock ignores them.
    result = helpers.call_action(
        'cadasta_show_parcel_relationship_history',
        id=1,
        fields="test",
        sort_by="test",
        sort_dir="test",
        limit=10,
    )
    # Expected value mirrors the mocked payload after JSON decoding.
    expected = {
        u'features': [{
            u'geometry': None,
            u'properties': {
                u'active': True,
                u'created_by': 11,
                u'date_modified': u'2015-09-02',
                u'description': u'History',
                u'expiration_date': None,
                u'first_name': u'Thurmond',
                u'last_name': u'Thomas',
                u'origin_id': 1,
                u'parcel_id': 1,
                u'parent_id': None,
                u'party_id': 1,
                u'relationship_id': 1,
                u'relationship_type': u'own',
                u'spatial_source': u'survey_sketch',
                u'time_created': u'2015-09-02T18:09:15.057843+00:00',
                u'time_updated': u'2015-09-02T18:09:15.057843+00:00',
                u'updated_by': None,
                u'version': 1},
            u'type': u'Feature'}],
        u'type': u'FeatureCollection'}
    assert_equal(expected, result)
def test_update_organization_config(self):
    """Test that Vercel environment variables are created"""
    with self.tasks():
        self.assert_setup_flow()
    uuid = self.mock_uuid4.hex
    # Secret names the integration should create; the DSN secret uses
    # the NEXT_PUBLIC_ prefix because the project framework is nextjs.
    secret_names = [
        "sentry_org_%s" % uuid,
        "sentry_project_%s" % uuid,
        "next_public_sentry_dsn_%s" % uuid,
        "sentry_auth_token_%s" % uuid,
    ]
    # Project metadata lookup: a github-linked Next.js project.
    responses.add(
        responses.GET,
        "https://api.vercel.com/v1/projects/%s"
        % "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H",
        json={
            "link": {
                "type": "github"
            },
            "framework": "nextjs"
        },
    )
    # Secret creation succeeds for every config value.
    for i, name in enumerate(secret_names):
        responses.add(
            responses.POST,
            "https://api.vercel.com/v2/now/secrets",
            json={"uid": "sec_%s" % i},
        )
    # mock get envs for all: no env vars exist yet, so each one is
    # created fresh (no deletes, unlike test_update_org_config_vars_exist).
    responses.add(
        responses.GET,
        "https://api.vercel.com/v5/projects/%s/env"
        % "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H",
        json={"envs": []},
    )
    for i, name in enumerate(secret_names):
        responses.add(
            responses.POST,
            "https://api.vercel.com/v4/projects/%s/env"
            % "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H",
            json={
                "value": "sec_%s" % i,
                "target": "production",
                "key": name
            },
        )
    responses.add(
        responses.POST,
        "https://api.vercel.com/v4/projects/%s/env"
        % "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H",
        json={
            "value": "",
            "target": "production",
            "key": "VERCEL_GITHUB_COMMIT_SHA"
        },
    )
    org = self.organization
    project_id = self.project.id
    # Map the Sentry project onto the Vercel project id.
    data = {
        "project_mappings": [[project_id,
                              "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H"]]
    }
    # Values the created secrets should carry: the project's public
    # DSN and the provider installation's API token.
    enabled_dsn = ProjectKey.get_default(project=Project.objects.get(
        id=project_id)).get_dsn(public=True)
    sentry_auth_token = SentryAppInstallationForProvider.objects.get(
        organization=org.id, provider="vercel")
    sentry_auth_token = sentry_auth_token.sentry_app_installation.api_token.token
    integration = Integration.objects.get(provider=self.provider.key)
    installation = integration.get_installation(org.id)
    org_integration = OrganizationIntegration.objects.get(
        organization_id=org.id, integration_id=integration.id)
    # Config starts empty before the update is applied.
    assert org_integration.config == {}
    # Run the config update with the deterministic uuid patched in so
    # the generated secret names match secret_names above.
    with patch("sentry.integrations.vercel.integration.uuid4",
               new=self.mock_uuid4):
        installation.update_organization_config(data)
    org_integration = OrganizationIntegration.objects.get(
        organization_id=org.id, integration_id=integration.id)
    assert org_integration.config == {
        "project_mappings": [[project_id,
                              "Qme9NXBpguaRxcXssZ1NWHVaM98MAL6PHDXUs1jPrgiM8H"]]
    }
    # Calls 7-10 are the secret-creation POSTs (name/value bodies);
    # calls 12-20 are the env-var POSTs (key/value/target bodies).
    # NOTE(review): the hard-coded indices depend on the exact mocked
    # call sequence of assert_setup_flow plus update_organization_config.
    req_params = json.loads(responses.calls[7].request.body)
    assert req_params["name"] == "SENTRY_ORG_%s" % uuid
    assert req_params["value"] == org.slug
    req_params = json.loads(responses.calls[8].request.body)
    assert req_params["name"] == "SENTRY_PROJECT_%s" % uuid
    assert req_params["value"] == self.project.slug
    req_params = json.loads(responses.calls[9].request.body)
    assert req_params["name"] == "NEXT_PUBLIC_SENTRY_DSN_%s" % uuid
    assert req_params["value"] == enabled_dsn
    req_params = json.loads(responses.calls[10].request.body)
    assert req_params["name"] == "SENTRY_AUTH_TOKEN_%s" % uuid
    assert req_params["value"] == sentry_auth_token
    req_params = json.loads(responses.calls[12].request.body)
    assert req_params["key"] == "SENTRY_ORG"
    assert req_params["value"] == "sec_0"
    assert req_params["target"] == "production"
    req_params = json.loads(responses.calls[14].request.body)
    assert req_params["key"] == "SENTRY_PROJECT"
    assert req_params["value"] == "sec_1"
    assert req_params["target"] == "production"
    req_params = json.loads(responses.calls[16].request.body)
    assert req_params["key"] == "NEXT_PUBLIC_SENTRY_DSN"
    assert req_params["value"] == "sec_2"
    assert req_params["target"] == "production"
    req_params = json.loads(responses.calls[18].request.body)
    assert req_params["key"] == "SENTRY_AUTH_TOKEN"
    assert req_params["value"] == "sec_3"
    assert req_params["target"] == "production"
    req_params = json.loads(responses.calls[20].request.body)
    assert req_params["key"] == "VERCEL_GITHUB_COMMIT_SHA"
    assert req_params["value"] == ""
    assert req_params["target"] == "production"
def mock_responses():
    # Registers the HTTP fixtures used by the image-serving tests:
    # several endpoints that serve a sample TIFF, one that serves a
    # non-image payload, and one that 404s.
    with open('tests/img/01/04/0001.tif', 'rb') as f:
        responses.add(
            responses.GET,
            'http://sample.sample/0001',
            body=f.read(),
            status=200,
            content_type='image/tiff'
        )
    # HEAD variant of the same resource (no body).
    responses.add(
        responses.HEAD,
        'http://sample.sample/0001',
        status=200,
        content_type='image/tiff'
    )
    # Same TIFF, but served without any content type.
    with open('tests/img/01/04/0001.tif', 'rb') as f:
        responses.add(
            responses.GET,
            'http://sample.sample/0002',
            body=f.read(),
            status=200
        )
    # Same TIFF under a nested path.
    with open('tests/img/01/04/0001.tif', 'rb') as f:
        responses.add(
            responses.GET,
            'http://sample.sample/01/02/0003',
            body=f.read(),
            status=200,
            content_type='image/invalidformat'  # noqa -- see below
        ) if False else responses.add(
            responses.GET,
            'http://sample.sample/01/02/0003',
            body=f.read(),
            status=200,
            content_type='image/tiff'
        )
    # Hand-crafted header bytes (TIFF-like "II*" magic) served with an
    # invalid content type, to exercise format validation.
    body = (
        'II*\x00\x0c\x00\x00\x00\x80\x00 \x0e\x00\x00\x01\x03\x00\x01\x00\x00'
        '\x00\x01\x00\x00\x00\x01\x01\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00'
        '\x02\x01\x03\x00\x01\x00\x00\x00\x08\x00\x00\x00\x03\x01\x03\x00\x01'
        '\x00\x00\x00\x05\x00\x00\x00\x06\x01\x03\x00\x01\x00\x00\x00\x03\x00'
        '\x00\x00\x11\x01\x04\x00\x01\x00\x00\x00\x08\x00\x00\x00\x15\x01\x03'
        '\x00\x01\x00\x00\x00\x01\x00\x00\x00\x16\x01\x03\x00\x01\x00\x00\x00'
        '\x08\x00\x00\x00\x17\x01\x04\x00\x01\x00\x00\x00\x04\x00\x00\x00\x1a'
        '\x01\x05\x00\x01\x00\x00\x00\xba\x00\x00\x00\x1b\x01\x05\x00\x01\x00'
        '\x00\x00\xc2\x00\x00\x00\x1c\x01\x03\x00\x01\x00\x00\x00\x01\x00\x00'
        '\x00(\x01\x03\x00\x01\x00\x00\x00\x02\x00\x00\x00@\x01\x03\x00\x00'
        '\x03\x00\x00\xca\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00H\x00\x00\x00'
        '\x01\x00\x00\x00H\x00\x00\x00\x01\x00\x00\x00\xff`\xe6q\x19\x08\x00'
        '\x00\x80\t\x00\x00\x80\n\x00\x00\x80\x0b\x00\x00\x80\x0c\x00\x00\x80'
        '\r'
    )
    responses.add(
        responses.GET,
        'http://sample.sample/0003',
        body=body,
        status=200,
        content_type='image/invalidformat'
    )
    # Missing resource: plain 404 with an HTML-ish content type.
    responses.add(
        responses.GET,
        'http://sample.sample/DOESNOTEXIST',
        body='Does Not Exist',
        status=404,
        content_type='application/html'
    )
def setup(self):
    """Register mocked EvalAI endpoints that all yield empty challenge data."""
    teams_participant = json.loads(
        challenge_response.challenge_participant_teams)
    teams_host = json.loads(challenge_response.challenge_host_teams)
    leaderboard_empty = json.loads(challenge_response.empty_leaderboard)
    base = "{}{}"
    no_challenges = '{"count": 2, "next": null, "previous": null,"results": []}'

    def register_get(full_url, payload):
        # Every endpoint in this fixture answers GET with JSON and 200.
        responses.add(responses.GET, full_url, json=payload, status=200)

    register_get(base.format(API_HOST_URL, URLS.challenge_list.value),
                 json.loads(no_challenges))
    register_get(base.format(API_HOST_URL, URLS.participant_teams.value),
                 teams_participant)
    register_get(base.format(API_HOST_URL, URLS.host_teams.value),
                 teams_host)
    register_get(
        base.format(API_HOST_URL,
                    URLS.participant_challenges.value).format("3"),
        json.loads(no_challenges))
    register_get(
        base.format(API_HOST_URL, URLS.host_challenges.value).format("2"),
        json.loads(no_challenges))
    register_get(
        base.format(API_HOST_URL,
                    URLS.challenge_phase_split_detail.value).format("1"),
        [])
    register_get(base.format(API_HOST_URL, URLS.leaderboard.value).format("1"),
                 leaderboard_empty)
    self.output = "Sorry, no challenges found.\n"
def assert_setup_flow(self, is_team=False, multi_config_org=None,
                      no_name=False):
    # Drives the full Vercel OAuth installation flow against mocked
    # endpoints and asserts the resulting Integration state.
    #   is_team          -- install for a Vercel team instead of a user
    #   multi_config_org -- a pre-existing org whose configuration
    #                       should survive alongside the new one
    #   no_name          -- the mocked Vercel user has no display name
    # Stand-in for uuid4 so generated identifiers are deterministic.
    class MockUuid4:
        hex = "1234567"

    self.mock_uuid4 = MockUuid4
    responses.reset()
    access_json = {
        "user_id": "my_user_id",
        "access_token": "my_access_token",
        "installation_id": "my_config_id",
    }
    if is_team:
        # Team installs carry a teamId query string on API calls.
        team_query = "?teamId=my_team_id"
        access_json["team_id"] = "my_team_id"
        responses.add(
            responses.GET,
            "https://api.vercel.com/v1/teams/my_team_id%s" % team_query,
            json={
                "name": "My Team Name",
                "slug": "my_team_slug"
            },
        )
    else:
        team_query = ""
        name = None if no_name else "My Name"
        responses.add(
            responses.GET,
            "https://api.vercel.com/www/user",
            json={"user": {
                "name": name,
                "username": "******"
            }},
        )
    # OAuth code-for-token exchange.
    responses.add(responses.POST,
                  "https://api.vercel.com/v2/oauth/access_token",
                  json=access_json)
    # Empty project listing for the new installation.
    responses.add(
        responses.GET,
        "https://api.vercel.com/v4/projects/%s" % team_query,
        json={
            "projects": [],
            "pagination": {
                "count": 0
            }
        },
    )
    # Webhook registration performed during setup.
    responses.add(
        responses.POST,
        "https://api.vercel.com/v1/integrations/webhooks%s" % team_query,
        json={"id": "webhook-id"},
    )
    params = {
        "configurationId": "config_id",
        "code": "oauth-code",
        "next": "https://example.com",
    }
    self.pipeline.bind_state("user_id", self.user.id)
    # TODO: Should use the setup path since we /configure instead
    resp = self.client.get(self.setup_path, params)
    # The first mocked call is the token exchange; check its form body.
    mock_request = responses.calls[0].request
    req_params = parse_qs(mock_request.body)
    assert req_params["grant_type"] == ["authorization_code"]
    assert req_params["code"] == ["oauth-code"]
    assert req_params["redirect_uri"] == [
        "http://testserver/extensions/vercel/configure/"
    ]
    assert req_params["client_id"] == ["vercel-client-id"]
    assert req_params["client_secret"] == ["vercel-client-secret"]
    assert resp.status_code == 200
    self.assertDialogSuccess(resp)
    integration = Integration.objects.get(provider=self.provider.key)
    external_id = "my_team_id" if is_team else "my_user_id"
    # NOTE(review): chained ternary -- for a user install with
    # no_name=True this expects "my_user_name", which does not match
    # the mocked username "******" above; confirm against the
    # integration's name-fallback logic.
    name = "My Team Name" if is_team else "my_user_name" if no_name else "My Name"
    installation_type = "team" if is_team else "user"
    assert integration.external_id == external_id
    assert integration.name == name
    configurations = {
        "my_config_id": {
            "access_token": "my_access_token",
            "webhook_id": "webhook-id",
            "organization_id": self.organization.id,
        }
    }
    if multi_config_org:
        # Earlier installation config must be preserved, keyed by its
        # original configuration id.
        configurations["orig_config_id"] = {
            "access_token": "orig_access_token",
            "webhook_id": "orig-webhook-id",
            "organization_id": multi_config_org.id,
        }
    assert integration.metadata == {
        "access_token": "my_access_token",
        "installation_id": "my_config_id",
        "installation_type": installation_type,
        "webhook_id": "webhook-id",
        "configurations": configurations,
    }
    assert OrganizationIntegration.objects.get(
        integration=integration, organization=self.organization)
    assert SentryAppInstallationForProvider.objects.get(
        organization=self.organization, provider="vercel")