def test_cookie(self, mock_request):
    """
    Test that each session has its own cookie "sandbox".
    """
    # Every request gets this canned response carrying a Set-Cookie header.
    response = Response()
    response.status_code = 200
    response._content = 'Mocked response content'
    response.headers = {'Set-Cookie': 'name=value'}
    response.url = 'http://www.test.com/path'
    mock_request.return_value = response

    s0 = Session()
    s1 = Session()

    # s0 make requests
    # First request carries no cookies; the Set-Cookie from the mocked
    # response is replayed on the second request.
    s0.get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', allow_redirects=True)
    s0.get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', allow_redirects=True, cookies={'name': 'value'})

    # s1 make requests
    # A fresh session starts clean: its first request has no cookies either.
    s1.get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', allow_redirects=True)
    s1.get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', allow_redirects=True, cookies={'name': 'value'})

    # s0 make requests again
    # s0 still holds its own cookie, unaffected by s1's activity.
    s0.get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', allow_redirects=True, cookies={'name': 'value'})
    s0.get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', allow_redirects=True, cookies={'name': 'value'})
def get_response(content, code=200):
    """Build a mock ``Response`` carrying *content* with status *code*.

    ``content`` may be raw data or a file-like object (anything with a
    ``read`` method), in which case it is read to completion first.
    """
    body = content.read() if hasattr(content, 'read') else content
    resp = Response()
    resp._content = body
    resp.status_code = code
    return resp
def _build_response_(self, with_errors=False):
    """Fake a 401 response whose raw body is the JSON from ``_build_json_``.

    :param with_errors: forwarded to ``_build_json_`` to include error details.
    :returns: a ``Response`` with UTF-8 encoding and a ``StringIO`` raw body.
    """
    payload = dumps(self._build_json_(with_errors))
    resp = Response()
    resp.status_code = 401
    resp.encoding = 'utf-8'
    resp.raw = StringIO(payload.encode())
    return resp
def test_create_session(self, mock_request):
    """create_session POSTs the serialized options and returns the new session."""
    # Mocked API reply: 201 Created with the fixture session payload.
    response = Response()
    response.status_code = 201
    response._content = json.dumps(test_session)
    mock_request.return_value = response

    expires_at = datetime.datetime.utcnow()
    doc_id = test_document['id']
    result = self.api.create_session(doc_id, duration=600, expires_at=expires_at,
                                     is_downloadable=True, is_text_selectable=True)
    self.assertIsNotNone(result)
    self.assertEqual(result['id'], test_session['id'])

    # Expected request body: expires_at must be truncated to whole seconds
    # and ISO-formatted before serialization.
    data = {
        'document_id': doc_id,
        'duration': 600,
        'expires_at': expires_at.replace(microsecond=0).isoformat(),
        'is_downloadable': True,
        'is_text_selectable': True
    }
    headers = {'Content-Type': 'application/json'}
    url = urljoin(API_URL, 'sessions')
    mock_request.assert_called_with('POST', url, data=json.dumps(data), headers=headers)
# NOTE(review): "crate" in this test name looks like a typo for "create";
# renaming is only safe if nothing selects tests by name, so it is kept.
def test_crate_document_from_url(self, mock_request):
    """create_document with a URL POSTs the document metadata as JSON."""
    response = Response()
    response.status_code = 201
    response._content = json.dumps(test_document)
    mock_request.return_value = response

    result = self.api.create_document(url=test_url, name='Test Document',
                                      thumbnails='100x100,200x200', non_svg=False)
    self.assertIsNotNone(result)
    self.assertEqual(result, test_document)

    # Expected payload; note non_svg=False does not appear in it.
    data = {
        'url': test_url,
        'name': 'Test Document',
        'thumbnails': '100x100,200x200',
    }
    headers = {'Content-Type': 'application/json'}
    url = urljoin(API_URL, 'documents')
    mock_request.assert_called_with('POST', url, data=json.dumps(data), headers=headers)

    # url of file param is required
    self.assertRaises(ValueError, self.api.create_document)
def test_get_document_content(self, mock_request):
    """get_document_content writes the body into a stream and returns the mimetype."""
    response = Response()
    response.status_code = 200
    response.headers['Content-Type'] = 'text/plain'
    response._content = 'test'
    # NOTE(review): six.BytesIO('test') requires bytes on Python 3 —
    # presumably this suite runs under Python 2; confirm.
    response.raw = six.BytesIO('test')
    mock_request.return_value = response

    stream = six.BytesIO()
    mimetype = self.api.get_document_content(stream, test_document['id'])
    self.assertEqual(stream.getvalue(), response._content)
    self.assertEqual(mimetype, response.headers['Content-Type'])

    # Explicit '.pdf' extension also streams the content.
    stream = six.BytesIO()
    self.api.get_document_content(stream, test_document['id'], extension='.pdf')
    self.assertEqual(stream.getvalue(), response._content)

    # Explicit '.zip' extension also streams the content.
    stream = six.BytesIO()
    self.api.get_document_content(stream, test_document['id'], extension='.zip')
    self.assertEqual(stream.getvalue(), response._content)

    stream = six.BytesIO()
    # allowed only .zip and .pdf extensions
    self.assertRaises(ValueError, self.api.get_document_content, stream,
                      test_document['id'], extension='.docx')
def _receive_response(self, task, response):
    """
    Called by the delegate when a response has been received.

    This call is expected only on background threads, and thus may not do
    anything that is not Python-thread-safe. This means that, for example,
    it is safe to grab things from the _tasks dictionary, but it is not
    safe to make other method calls on this object unless they explicitly
    state that they are safe in background threads.
    """
    queue, request = self._tasks[task]

    # Translate the delegate's native response object into a requests
    # Response using getKey lookups.
    resp = Response()
    resp.status_code = getKey(response, 'statusCode')
    resp.reason = ''  # TODO: Why do I have to do this?
    raw_headers = getKey(response, 'allHeaderFields')
    resp.headers = CaseInsensitiveDict(raw_headers)
    resp.encoding = get_encoding_from_headers(resp.headers)
    # TODO: This needs to point to an object that we can use to provide
    # the various raw things that requests needs.
    resp.raw = None

    # Mirror requests' convention: URL is stored as text, not bytes.
    if isinstance(request.url, bytes):
        resp.url = request.url.decode('utf-8')
    else:
        resp.url = request.url

    resp.request = request
    resp.connection = self

    # Put this response on the queue (put_nowait is thread-safe).
    queue.put_nowait(resp)
def test_disable_default_redirect_cache(self, mock_request):
    """
    Test disable default redirect cache (by setting default redirect cache to None)
    """
    # Permanent (301) redirect from /neverseemeagain ...
    response0 = Response()
    response0.url = 'http://www.test.com/neverseemeagain'
    response0.status_code = 301
    response0.headers = {
        'Location': 'http://www.test.com/redirect_here',
    }
    # ... to the final 200 destination; history records the hop.
    response1 = Response()
    response1.url = 'http://www.test.com/redirect_here'
    response1.status_code = 200
    response1._content = 'Mocked response content'
    response1.headers = {
        'Vary': 'Accept',
    }
    response1.history = [response0]
    mock_request.return_value = response1

    # With the cache active, the second call goes straight to the target URL.
    get('http://www.test.com/neverseemeagain')
    mock_request.assert_called_with('GET', 'http://www.test.com/neverseemeagain', allow_redirects=True)
    get('http://www.test.com/neverseemeagain')
    mock_request.assert_called_with('GET', 'http://www.test.com/redirect_here', allow_redirects=True)

    # After disabling the cache, every call hits the original URL again.
    set_default_redirect_cache(None)
    get('http://www.test.com/neverseemeagain')
    mock_request.assert_called_with('GET', 'http://www.test.com/neverseemeagain', allow_redirects=True)
    get('http://www.test.com/neverseemeagain')
    mock_request.assert_called_with('GET', 'http://www.test.com/neverseemeagain', allow_redirects=True)
def make_mock_response(filename, status_code=None):
    """Build a mock ``Response`` whose body comes from a fixture file.

    :param filename: name of a file inside ``resource_location``; its text
        becomes the response body, stored UTF-8 encoded.
    :param status_code: HTTP status to report; defaults to 200 when None.
    :returns: a ``requests.Response`` with ``status_code``, ``encoding`` and
        ``_content`` populated.
    """
    response = Response()
    # Explicit None-check so a caller-supplied falsy code (e.g. 0) is kept
    # rather than being silently replaced by 200 (the old `or 200` did that).
    response.status_code = 200 if status_code is None else status_code
    response.encoding = "utf-8"
    # Read as UTF-8 to match the declared response encoding instead of the
    # platform's default locale encoding.
    with open(os.path.join(resource_location, filename), encoding="utf-8") as text:
        response._content = text.read().encode()
    return response
def test_iterate(self):
    """iterate() yields read-only model instances built from the JSON list."""
    with patch.object(self.rsm, 'mk_request') as mock:
        # Canned response: a JSON array of two person records.
        response = Response()
        response.encoding = 'utf-8'
        response._content = json.dumps([{
            'uuid': 'person1',
            'age': 1,
            'name': 'person1'
        }, {
            'uuid': 'person2',
            'age': 2,
            'name': 'person2'
        }])
        mock.return_value = response

        person1, person2 = self.rsm.iterate(TestPerson)

        # Each record deserializes field-for-field and is marked read-only.
        self.assertEqual(person1.uuid, 'person1')
        self.assertEqual(person1.age, 1)
        self.assertEqual(person1.name, 'person1')
        self.assertTrue(person1.is_read_only())
        self.assertEqual(person2.uuid, 'person2')
        self.assertEqual(person2.age, 2)
        self.assertEqual(person2.name, 'person2')
        self.assertTrue(person2.is_read_only())

        # The collection URL is derived from the model's fully-qualified name.
        mock.assert_called_with(
            'GET', 'http://www.example.org/repos/foo/%s.json' % (
                fqcn(TestPerson),))
def request(method, url, **kwargs):
    """Stub transport that unconditionally returns a 403 Forbidden response.

    Ignores ``method``, ``url`` and all keyword arguments.
    """
    response = Response()
    response.status_code = 403
    # NOTE(review): requests' `encoding` normally holds a charset such as
    # 'utf-8'; 'application/json' looks like it was meant for a
    # Content-Type header — confirm before changing, tests may rely on it.
    response.encoding = 'application/json'
    response._content = '"Unauthorized: upload_view failed permission check"'
    response.reason = '403 Forbidden'
    return response
def test_get_301_only_once(self, mock_request):
    """A permanent (301) redirect is cached: later calls skip the old URL."""
    # 301 hop from /neverseemeagain ...
    response0 = Response()
    response0.url = 'http://www.test.com/neverseemeagain'
    response0.status_code = 301
    response0.headers = {
        'Location': 'http://www.test.com/redirect_here',
    }
    # ... to the 200 destination; history records the redirect.
    response1 = Response()
    response1.url = 'http://www.test.com/redirect_here'
    response1.status_code = 200
    response1._content = 'Mocked response content'
    response1.headers = {
        'Vary': 'Accept',
    }
    response1.history = [response0]
    mock_request.return_value = response1

    r = get('http://www.test.com/neverseemeagain')
    self.assertEqual(mock_request.call_count, 1)
    mock_request.assert_called_with('GET', 'http://www.test.com/neverseemeagain', allow_redirects=True)
    self.assertEqual(r.status_code, 200)

    # Assert we do not request the 301 URL again: the cached redirect sends
    # the second call straight to the target.
    r = get('http://www.test.com/neverseemeagain')
    self.assertEqual(mock_request.call_count, 2)
    mock_request.assert_called_with('GET', 'http://www.test.com/redirect_here', allow_redirects=True)
    self.assertEqual(r.status_code, 200)
def build_response(self, req, resp):
    """Builds a :class:`Response <requests.Response>` object from a urllib3
    response. This should not be called from user code, and is only exposed
    for use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

    :param req: The :class:`PreparedRequest <PreparedRequest>` used to
        generate the response.
    :param resp: The urllib3 response object.
    """
    response = Response()

    # `status` may be missing on the urllib3 object; fall back to None.
    response.status_code = getattr(resp, 'status', None)

    # Headers become case-insensitive, and drive the encoding guess.
    response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
    response.encoding = get_encoding_from_headers(response.headers)

    response.raw = resp
    response.reason = response.raw.reason

    # requests stores URLs as text, never bytes.
    url = req.url
    response.url = url.decode('utf-8') if isinstance(url, bytes) else url

    # Add new cookies from the server.
    extract_cookies_to_jar(response.cookies, req, resp)

    # Give the Response some context.
    response.request = req
    response.connection = self

    return response
def test_set_default_cookie_cache(self, mock_request):
    """Swapping the default cookie cache isolates cookies per cache object."""
    response = Response()
    response.headers = {
        'Set-Cookie': 'name=value',
    }
    response.url = 'http://www.test.com/path'
    mock_request.return_value = response

    C0 = self.cookie_cache
    C1 = Cache()
    C2 = Cache()

    # Default cache C0: the cookie appears from the second request onwards.
    get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', allow_redirects=True)
    get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', cookies={'name': 'value'}, allow_redirects=True)

    # Fresh cache C1 starts without cookies.
    set_default_cookie_cache(C1)
    get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', allow_redirects=True)
    get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', cookies={'name': 'value'}, allow_redirects=True)

    # Fresh cache C2 likewise starts clean.
    set_default_cookie_cache(C2)
    get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', allow_redirects=True)
    get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', cookies={'name': 'value'}, allow_redirects=True)

    # Restoring C0 brings its previously stored cookie straight back.
    set_default_cookie_cache(C0)
    get('http://www.test.com/path')
    mock_request.assert_called_with('GET', 'http://www.test.com/path', cookies={'name': 'value'}, allow_redirects=True)
def test_file_update():
    """Saving modified fields issues a PATCH, then refreshes the parent account."""
    account = Account.create_from_data(json.loads(helpers.account))
    file_data = json.loads(helpers.file_data)
    file_obj = File.create_from_data(file_data, parent_resource=account)
    with patch('kloudless.resources.request') as mock_req:
        # First canned response answers the PATCH with the renamed file.
        resp = Response()
        new_data = file_data.copy()
        new_data['name'] = 'NewFileName'
        resp._content = json.dumps(new_data)
        # Second canned response answers the parent-account refresh GET.
        account_resp = Response()
        account_resp._content = helpers.account
        mock_req.side_effect = (resp, account_resp)

        file_obj.name = 'NewFileName'
        file_obj.parent_id = 'root'
        file_obj.save()

        expected_calls = [
            # This is updating the file
            call(file_obj._api_session.patch,
                 'accounts/%s/files/%s' % (account.id, file_data['id']),
                 params={},
                 data={'name': u'NewFileName', 'parent_id': 'root'},
                 configuration=file_obj._configuration),
            # This is refreshing the parent resource
            call(account._api_session.get,
                 'accounts/%s' % account.id,
                 configuration=account._configuration),
        ]
        mock_req.assert_has_calls(expected_calls)
def test_GET_request(self):
    """A GET through the proxy forwards params/headers and relays the body."""
    request = DummyRequest(
        path='/proxy/bar/remote/path/@@view',
        params={'foo': 'bar'},
        headers={'X-BRIDGE-ORIGIN': 'foo', 'X-BRIDGE-AC': 'john.doe'})

    # Remote side replies with a plain 200 and a small body.
    response = Response()
    response.status_code = 200
    response.raw = StringIO('the response data')
    response.headers['content-length'] = 17

    # Expect the proxy to call through to the backend with the same
    # params and bridge headers.
    self.expect(self.requests.request(
        'get',
        'http://127.0.0.1:9080/bar/remote/path/@@view',
        params={'foo': 'bar'},
        headers={'X-BRIDGE-ORIGIN': 'foo', 'X-BRIDGE-AC': 'john.doe'})).result(
            response)
    self.mocker.replay()

    proxy = queryAdapter(request, IProxy)
    response = proxy()
    self.assertTrue(IResponse.providedBy(response))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.body, 'the response data')
def test_proxy_replaces_portal_url_in_data(self):
    """The portal-URL placeholder in request params is expanded before forwarding."""
    request = DummyRequest(
        path='/proxy/bar/remote/path/@@view',
        params={'foo': 'bar %s baz' % PORTAL_URL_PLACEHOLDER},
        headers={'X-BRIDGE-ORIGIN': 'foo', 'X-BRIDGE-AC': 'john.doe'})

    response = Response()
    response.status_code = 200
    response.raw = StringIO('response')

    # The forwarded params must contain the real portal URL, not the
    # placeholder token.
    self.expect(self.requests.request(
        'get',
        'http://127.0.0.1:9080/bar/remote/path/@@view',
        params={'foo': 'bar http://localhost:8080/foo/ baz'},
        headers={'X-BRIDGE-ORIGIN': 'foo', 'X-BRIDGE-AC': 'john.doe'})).result(
            response)
    self.mocker.replay()

    proxy = queryAdapter(request, IProxy)
    response = proxy()
    self.assertTrue(IResponse.providedBy(response))
    self.assertEqual(response.status, '200 OK')
def get(self, url, header=None, retry_time=5, timeout=30, retry_flag=(), retry_interval=5, *args, **kwargs):
    """
    get method
    :param url: target url
    :param header: extra headers merged over the instance defaults
    :param retry_time: retry time when network error
    :param timeout: network timeout
    :param retry_flag: if any retry_flag is found in content, do retry
    :param retry_interval: retry interval (seconds)
    :param args:
    :param kwargs:
    :return: the successful response, or an empty placeholder Response
             after all retries are exhausted
    """
    # Copy so per-call extras never leak into the shared instance headers
    # (the old `headers = self.header` mutated self.header via update()).
    headers = dict(self.header)
    if header and isinstance(header, dict):
        headers.update(header)
    while True:
        try:
            html = requests.get(url, headers=headers, timeout=timeout)
            if any(f in html.content for f in retry_flag):
                raise Exception('retry flag found in response content')
            return html
        except Exception as e:
            print(e)
            retry_time -= 1
            if retry_time <= 0:
                # All retries failed: return an empty placeholder response.
                # Status 200 is kept for backward compatibility, although a
                # 5xx code would describe the failure more honestly.
                resp = Response()
                resp.status_code = 200
                return resp
            time.sleep(retry_interval)
def testCreateUrl(self):
    """Attribute-style API access builds the expected Piwik request parameters."""
    def _mock_get(url, **kwargs):
        # Verify both the endpoint and the complete parameter set.
        self.assertEquals("http://demo.piwik.org/", url)
        self.assertEquals(
            {
                "module": "API",
                "method": "Referers.getKeywords",
                "format": "json",
                "idSite": 3,
                "date": "yesterday",
                "period": "day",
                "token_auth": "1231",
                "filter_limit": 10,
            },
            kwargs.get("params"),
        )
        return response

    response = Response()
    response.status_code = 200
    response._content = '{"result":"success", "xxx":"aaa"}'

    # Monkeypatch requests.get for this call only; always restore it.
    old_get = requests.get
    try:
        requests.get = _mock_get
        api = PiwikAPI("http://demo.piwik.org/", "1231")
        self.assertEquals(
            {"result": "success", "xxx": "aaa"},
            api.Referers.getKeywords(idSite=3, date="yesterday", period="day",
                                     filter_limit=10),
        )
    finally:
        requests.get = old_get
def build_response(self, request, resp):
    """
    Builds a Requests' response object. This emulates most of the logic of
    the standard fuction but deals with the lack of the ``.headers``
    property on the HTTP20Response object.
    """
    response = Response()
    response.status_code = resp.status
    response.headers = CaseInsensitiveDict(resp.getheaders())
    response.raw = resp
    response.reason = resp.reason
    response.encoding = get_encoding_from_headers(response.headers)

    extract_cookies_to_jar(response.cookies, request, response)

    # URLs are stored as text, never bytes.
    url = request.url
    response.url = url.decode('utf-8') if isinstance(url, bytes) else url

    response.request = request
    response.connection = self

    # One last horrible patch: Requests expects its raw responses to have a
    # release_conn method, which I don't. We should monkeypatch a no-op on.
    resp.release_conn = lambda: None

    return response
def test_filter_existing__removes_duplicates(self, http):
    """remove_existing() drops problems already posted as review comments."""
    # The fixture holds the comments that already exist on the pull request.
    fixture_data = load_fixture('comments_current.json')
    response = Response()
    response._content = fixture_data
    http.return_value = response

    gh = Github()
    problems = Problems()
    review = Review(gh, 2)
    filename_1 = "Routing/Filter/AssetCompressor.php"
    filename_2 = "View/Helper/AssetCompressHelper.php"

    # Two problems per file; one of each pair duplicates a fixture comment.
    problems.add(filename_1, 87, 'A pithy remark')
    problems.add(filename_1, 87, 'Something different')
    problems.add(filename_2, 88, 'I <3 it')
    problems.add(filename_2, 89, 'Not such a good comment')

    review.load_comments()
    review.remove_existing(problems)

    # Only the non-duplicate problem survives for each file.
    res = problems.all(filename_1)
    eq_(1, len(res))
    expected = Comment(filename_1, 87, 87, 'Something different')
    eq_(res[0], expected)

    res = problems.all(filename_2)
    eq_(1, len(res))
    expected = Comment(filename_2, 88, 88, 'I <3 it')
    eq_(res[0], expected)
# NOTE(review): "registed" in this test name looks like a typo for
# "registered"; renaming is only safe if nothing selects tests by name.
def test_post_user_already_registed(self, facebook_user_data_mock, access_token_mock):
    """POSTing a person whose facebookId already exists yields 400 Bad Request."""
    # An existing person with the same Facebook id is seeded first.
    self.fixtures.fake_person(facebookId=10999901201, name="Afonso Ramos",
                              username="******", gender="male")
    access_token_mock.return_value = "21ghhgv31hjg32j1h3g12"

    # Facebook profile lookup returns a matching existing user.
    facebook_response = Response()
    facebook_response.status_code = 200
    facebook_response.json = mock.MagicMock(return_value={
        "id": "10999901201",
        "name": "afonsoramosrb"})
    # NOTE(review): "aplication/json" is misspelled, but it is runtime data
    # this test may depend on, so it is kept as-is.
    facebook_response.content_type = "aplication/json"
    facebook_user_data_mock.return_value = facebook_response

    data = {'facebookId': '10999901201'}
    with transaction.atomic():
        response = self.client.post('/api/v1/person/', data, format='json')
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def request(method, url, **kwargs):
    """Adapt a requests-style call onto in-process WebTest apps.

    Calls targeting the API URL are routed to the ``api`` test app
    (optionally with temporary basic-auth credentials); everything else
    goes to the main ``app``. The WebTest result is repackaged as a
    ``requests.Response``.
    """
    # WebTest takes body/query values under `params`, not `data`.
    if 'data' in kwargs:
        kwargs['params'] = kwargs.pop('data')
    elif 'params' in kwargs and kwargs['params'] is None:
        kwargs.pop('params')
    auth = None
    if 'auth' in kwargs:
        auth = kwargs.pop('auth')
    # Drop requests-only keyword arguments WebTest does not understand.
    for i in ['auth', 'allow_redirects', 'stream']:
        if i in kwargs:
            kwargs.pop(i)
    if app.app.registry.api_url in url:
        if auth:
            # Temporarily switch the API client's credentials; restored below.
            authorization = api.authorization
            api.authorization = ('Basic', auth)
        resp = api._gen_request(method.upper(), url, expect_errors=True, **kwargs)
        if auth:
            api.authorization = authorization
    else:
        resp = app._gen_request(method.upper(), url, expect_errors=True, **kwargs)

    # Repackage the WebTest response as a requests.Response.
    response = Response()
    response.status_code = resp.status_int
    response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = resp
    response._content = resp.body
    response.reason = resp.status
    if isinstance(url, bytes):
        response.url = url.decode('utf-8')
    else:
        response.url = url
    response.request = resp.request
    return response
def send(self, request, **kwargs):
    """Send a prepared request over an M2Crypto-backed HTTPS connection.

    :param request: the PreparedRequest to send; must use the https scheme.
    :raises Exception: if the request URL is not HTTPS.
    :returns: a ``requests.Response`` built from the raw HTTP response.
    """
    url = urlparse(request.url)
    if url.scheme != 'https':
        raise Exception('Only HTTPS is supported!')

    ctx = self._make_context()
    conn = httpslib.HTTPSConnection(
        url.hostname, url.port or 443, ssl_context=ctx)
    # Include the query string: url.path alone would silently drop any
    # ?key=value parameters from the forwarded request.
    selector = url.path
    if url.query:
        selector = '%s?%s' % (url.path, url.query)
    conn.request(request.method, selector, request.body, request.headers)
    resp = conn.getresponse()

    response = Response()

    # Fallback to None if there's no status_code, for whatever reason.
    response.status_code = getattr(resp, 'status', None)

    # Make headers case-insensitive.
    response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

    # Set encoding.
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = resp
    response.reason = response.raw.reason

    if isinstance(request.url, bytes):
        response.url = request.url.decode('utf-8')
    else:
        response.url = request.url

    # Give the Response some context.
    response.request = request
    response.connection = self

    return response
def create_mock_response(cls, status_code, data, filter=None, order_by=None, page=None, error=None, headers=None):
    """Build a MagicMock whose call result is a fake ``Response``.

    Args:
        status_code: HTTP status code for the fake response.
        data: a NURESTObject, a list of NURESTObjects, or a falsy value;
            serialized to JSON via ``to_dict``.
        filter: a string representing a filter (accepted for signature
            compatibility; not used here).
        order_by: a string representing an order by (not used here).
        page: a page number (not used here).
        error: accepted for signature compatibility; not used here.
        headers: optional headers mapping set on the fake response.

    Returns:
        A ``MagicMock`` returning the prepared response when called.
    """
    content = None

    # A list serializes to a JSON array of each object's dict form.
    if type(data) == list:
        content = list()
        for obj in data:
            content.append(obj.to_dict())
    elif data:
        content = data.to_dict()

    response = Response()
    response.status_code = status_code
    response._content = json.dumps(content)

    if headers:
        response.headers = headers

    return MagicMock(return_value=response)
def test_expired_cookie(self, mock_request):
    """Cookies drop out of subsequent requests once they expire."""
    response = Response()
    response.status_code = 200
    response._content = 'Mocked response content'
    # Cookie 'a' expires ~3s from now (Expires); 'b' after 6s (Max-Age).
    response.headers = {
        'Set-Cookie': 'a=apple; expires=%s;, b=banana; max-age=6' % _getdate(future=3)
    }
    response.url = 'http://www.fruits.com'
    mock_request.return_value = response

    get('http://www.fruits.com/path')

    # +1s: both cookies are still valid.
    dummycache_cache.datetime.now = lambda: datetime.now() + timedelta(seconds=1)
    get('http://www.fruits.com/path')
    mock_request.assert_called_with('GET', 'http://www.fruits.com/path',
                                    allow_redirects=True, cookies={'a': 'apple', 'b': 'banana'})

    # +4s: 'a' has expired, 'b' remains.
    dummycache_cache.datetime.now = lambda: datetime.now() + timedelta(seconds=4)
    get('http://www.fruits.com/path')
    mock_request.assert_called_with('GET', 'http://www.fruits.com/path',
                                    allow_redirects=True, cookies={'b': 'banana'})

    # +11s: both cookies have expired; no cookies are sent.
    dummycache_cache.datetime.now = lambda: datetime.now() + timedelta(seconds=11)
    get('http://www.fruits.com/path')
    mock_request.assert_called_with('GET', 'http://www.fruits.com/path', allow_redirects=True)
def test_disable_default_cache(self, mock_request):
    """
    Test disable default cache (by setting default cache to None)
    """
    # A cacheable response: max-age=100 keeps it fresh for the whole test.
    response = Response()
    response.status_code = 200
    response._content = 'Mocked response content'
    response.headers = {
        'Cache-Control': 'max-age=100',
    }
    mock_request.return_value = response

    # With the cache enabled, the second GET is served from cache.
    get('http://www.test.com/path')
    self.assertEqual(mock_request.call_count, 1)
    get('http://www.test.com/path')
    self.assertEqual(mock_request.call_count, 1)

    # With the cache disabled, every GET reaches the transport.
    set_default_cache(None)
    get('http://www.test.com/path')
    self.assertEqual(mock_request.call_count, 2)
    get('http://www.test.com/path')
    self.assertEqual(mock_request.call_count, 3)
    get('http://www.test.com/path')
    self.assertEqual(mock_request.call_count, 4)
def _prepare_auth_test(self, code=200, side_effect=None):
    """
    Creates the needed defers to test several test situations. It adds up
    to the auth preprocessing step.

    :param code: status code for the response of POST in requests
    :type code: int
    :param side_effect: side effect triggered by the POST method in
                        requests
    :type side_effect: some kind of Exception
    :returns: the defer that is created
    :rtype: defer.Deferred
    """
    res = Response()
    res.status_code = code
    # Replace the session's POST with an autospec'd mock that returns the
    # canned response (or raises side_effect when one is given).
    self.auth_backend._session.post = mock.create_autospec(
        self.auth_backend._session.post,
        return_value=res,
        side_effect=side_effect
    )

    # Registration runs in a worker thread; chain the authentication
    # preprocessing step after it completes.
    d = threads.deferToThread(self.register.register_user,
                              self.TEST_USER, self.TEST_PASS)

    def wrapper_preproc(*args):
        return threads.deferToThread(
            self.auth_backend._authentication_preprocessing,
            self.TEST_USER, self.TEST_PASS
        )

    d.addCallback(wrapper_preproc)
    return d
def test_account_list():
    """All accounts fetched via the API deserialize to Account instances."""
    with patch('kloudless.resources.request') as mock_req:
        mock_resp = Response()
        mock_resp._content = helpers.account_list
        mock_req.return_value = mock_resp

        accounts = kloudless.Account().all()

        assert len(accounts) > 0
        assert all(isinstance(item, Account) for item in accounts)
def test_get_document_content_mimetype(self, mock_request):
    """get_document_content_mimetype returns the Content-Type header value."""
    mocked = Response()
    mocked.status_code = 200
    mocked.headers['Content-Type'] = 'text/plain'
    mock_request.return_value = mocked

    mimetype = self.api.get_document_content_mimetype(test_document['id'])
    self.assertEqual(mimetype, mocked.headers['Content-Type'])
def cors_error_response():
    """Return a bare 403 response used to reject disallowed CORS origins."""
    resp = Response()
    resp.status_code = 403
    return resp
def modify_and_forward(
    method: str = None,
    path: str = None,
    data_bytes: bytes = None,
    headers: Headers = None,
    forward_base_url: str = None,
    listeners: List[ProxyListener] = None,
    client_address: str = None,
    server_address: str = None,
):
    """This is the central function that coordinates the incoming/outgoing
    messages with the proxy listeners (message interceptors).

    Flow: CORS pre-check -> inbound listeners (which may short-circuit with a
    response or rewrite the request) -> actual backend invocation via
    ``requests`` -> outbound listeners (which may rewrite or replace the
    response) -> CORS headers appended (unless an S3 listener handles them).
    """
    from localstack.services.edge import ProxyListenerEdge

    # Check origin / referer header before anything else happens.
    if (not config.DISABLE_CORS_CHECKS
            and should_enforce_self_managed_service(method, path, headers, data_bytes)
            and not is_cors_origin_allowed(headers)):
        LOG.info(
            "Blocked CORS request from forbidden origin %s",
            headers.get("origin") or headers.get("referer"),
        )
        return cors_error_response()

    listeners = [lis for lis in listeners or [] if lis]
    default_listeners = list(ProxyListener.DEFAULT_LISTENERS)

    # ensure that MessageModifyingProxyListeners are not applied in the edge proxy request chain
    # TODO: find a better approach for this!
    is_edge_request = [lis for lis in listeners if isinstance(lis, ProxyListenerEdge)]
    if is_edge_request:
        default_listeners = [
            lis for lis in default_listeners
            if not isinstance(lis, MessageModifyingProxyListener)
        ]

    # Inbound runs defaults first; outbound runs service listeners first.
    listeners_inbound = default_listeners + listeners
    listeners_outbound = listeners + default_listeners
    data = data_bytes
    original_request = RoutingRequest(method=method, path=path, data=data, headers=headers)

    def is_full_url(url):
        # True when `url` carries a scheme (e.g. "http://...").
        return re.match(r"[a-zA-Z]+://.+", url)

    def get_proxy_backend_url(_path, original_url=None, run_listeners=False):
        # Reduce a full URL down to its path component.
        if is_full_url(_path):
            _path = _path.split("://", 1)[1]
            _path = "/%s" % (_path.split("/", 1)[1] if "/" in _path else "")
        base_url = forward_base_url or original_url
        result = update_path_in_url(base_url, _path)
        if run_listeners:
            # Any inbound listener may override the forward URL entirely.
            for listener in listeners_inbound:
                result = listener.get_forward_url(method, path, data, headers) or result
        return result

    target_url = path
    if not is_full_url(target_url):
        target_url = "%s%s" % (forward_base_url, target_url)

    # update original "Host" header (moto s3 relies on this behavior)
    if not headers.get("Host"):
        headers["host"] = urlparse(target_url).netloc
    headers["X-Forwarded-For"] = build_x_forwarded_for(headers, client_address, server_address)

    response = None
    handler_chain_request = original_request.copy()
    modified_request_to_backend = None

    # run inbound handlers (pre-invocation)
    for listener in listeners_inbound:
        try:
            listener_result = listener.forward_request(
                method=handler_chain_request.method,
                path=handler_chain_request.path,
                data=handler_chain_request.data,
                headers=handler_chain_request.headers,
            )
        except HTTPException as e:
            # TODO: implement properly using exception handlers
            return http_exception_to_response(e)

        if isinstance(listener, MessageModifyingProxyListener):
            if isinstance(listener_result, RoutingRequest):
                # update the modified request details, then call next listener
                handler_chain_request.method = (
                    listener_result.method or handler_chain_request.method)
                handler_chain_request.path = listener_result.path or handler_chain_request.path
                if listener_result.data is not None:
                    handler_chain_request.data = listener_result.data
                if listener_result.headers is not None:
                    handler_chain_request.headers = listener_result.headers
            continue

        # Non-modifying listeners may short-circuit the chain entirely.
        if isinstance(listener_result, Response):
            response = listener_result
            break
        if isinstance(listener_result, LambdaResponse):
            response = listener_result
            break
        if isinstance(listener_result, dict):
            # A plain dict is serialized into a 200 JSON response.
            response = Response()
            response._content = json.dumps(json_safe(listener_result))
            response.headers["Content-Type"] = APPLICATION_JSON
            response.status_code = 200
            break
        elif isinstance(listener_result, Request):
            # TODO: unify modified_request_to_backend (requests.Request) and
            # handler_chain_request (ls.routing.Request)
            modified_request_to_backend = listener_result
            break
        elif http2_server.get_async_generator_result(listener_result):
            return listener_result
        elif listener_result is not True:
            # get status code from response, or use Bad Gateway status code
            code = listener_result if isinstance(listener_result, int) else 503
            response = Response()
            response.status_code = code
            response._content = ""
            response.headers["Content-Length"] = "0"
            append_cors_headers(request_headers=headers, response=response)
            return response

    # perform the actual invocation of the backend service
    headers_to_send = None
    data_to_send = None
    method_to_send = None
    if response is None:
        headers_to_send = handler_chain_request.headers
        headers_to_send["Connection"] = headers_to_send.get("Connection") or "close"
        data_to_send = handler_chain_request.data
        method_to_send = handler_chain_request.method
        request_url = get_proxy_backend_url(handler_chain_request.path, run_listeners=True)
        if modified_request_to_backend:
            if modified_request_to_backend.url:
                request_url = get_proxy_backend_url(
                    modified_request_to_backend.url, original_url=request_url)
            data_to_send = modified_request_to_backend.data
            if modified_request_to_backend.method:
                method_to_send = modified_request_to_backend.method

        # make sure we drop "chunked" transfer encoding from the headers to be forwarded
        headers_to_send.pop("Transfer-Encoding", None)
        response = requests.request(
            method_to_send,
            url=request_url,
            data=data_to_send,
            headers=headers_to_send,
            stream=True,
            verify=False,
        )

    # prevent requests from processing response body (e.g., to pass-through gzip encoded content
    # unmodified)
    not_consumed = not getattr(response, "_content_consumed", True)
    pass_raw = not_consumed or response.headers.get("content-encoding") in ["gzip"]
    if pass_raw and getattr(response, "raw", None):
        new_content = response.raw.read()
        if new_content:
            response._content = new_content

    # run outbound handlers (post-invocation)
    for listener in listeners_outbound:
        updated_response = listener.return_response(
            method=method_to_send or handler_chain_request.method,
            path=handler_chain_request.path,
            data=data_to_send or handler_chain_request.data,
            headers=headers_to_send or handler_chain_request.headers,
            response=response,
        )
        message_modifier = isinstance(listener, MessageModifyingProxyListener)
        if message_modifier and isinstance(updated_response, RoutingResponse):
            # update the fields from updated_response in final response
            response.status_code = updated_response.status_code or response.status_code
            response.headers = updated_response.headers or response.headers
            if isinstance(updated_response.content, (str, bytes)):
                response._content = updated_response.content
        if isinstance(updated_response, Response):
            response = updated_response

    # allow pre-flight CORS headers by default
    from localstack.services.s3.s3_listener import ProxyListenerS3

    is_s3_listener = any(
        isinstance(service_listener, ProxyListenerS3) for service_listener in listeners)
    if not is_s3_listener:
        append_cors_headers(request_headers=headers, response=response)

    return response
def forward_request(self, method, path, data, headers):
    """Entry point for requests arriving at the LocalStack edge proxy.

    Serves the internal /health and /graph endpoints directly, resolves the
    target AWS API and backend port from headers/path/data, normalizes auth
    and encoding headers, and forwards the request to the backend service.

    :param method: HTTP method of the incoming request
    :param path: request path (may include a query string)
    :param data: raw request body (bytes/str) or an already-parsed dict
    :param headers: mutable, case-insensitive header mapping (supports .set())
    :return: a Response object, an int status code (e.g. 404/200), or the
        result of do_forward_request()
    """
    # internal endpoints that bypass forwarding entirely
    if path.split('?')[0] == '/health':
        return serve_health_endpoint(method, path, data)
    if method == 'POST' and path == '/graph':
        return serve_resource_graph(data)
    # kill the process if we receive this header
    headers.get(HEADER_KILL_SIGNAL) and os._exit(0)
    target = headers.get('x-amz-target', '')
    auth_header = get_auth_string(method, path, headers, data)
    if auth_header and not headers.get('authorization'):
        headers['authorization'] = auth_header
    host = headers.get('host', '')
    orig_req_url = headers.pop(HEADER_LOCALSTACK_REQUEST_URL, '')
    # record the scheme://host prefix of the original request URL (fall back
    # to the Host header) so downstream listeners know the edge URL
    headers[HEADER_LOCALSTACK_EDGE_URL] = (
        re.sub(r'^([^:]+://[^/]+).*', r'\1', orig_req_url) or 'http://%s' % host)
    # extract API details
    api, port, path, host = get_api_from_headers(headers, method=method, path=path, data=data)
    if api and config.LS_LOG:
        # print request trace for debugging, if enabled
        LOG.debug('IN(%s): "%s %s" - headers: %s - data: %s' % (api, method, path, dict(headers), data))
    set_default_region_in_headers(headers)
    # negative port is a sentinel meaning "reject this request"
    if port and int(port) < 0:
        return 404
    if not port:
        # header-based detection failed - try custom path/data based rules
        api, port = get_api_from_custom_rules(method, path, data, headers) or (api, port)
    if not port:
        if method == 'OPTIONS':
            if api and config.LS_LOG:
                # print request trace for debugging, if enabled
                LOG.debug('OUT(%s): "%s %s" - status: %s' % (api, method, path, 200))
            # always accept CORS pre-flight requests
            return 200
        if api in ['', None, API_UNKNOWN]:
            truncated = truncate(data)
            # suppress the log for plain "/" or favicon probes with no payload
            if auth_header or target or data or path not in ['/', '/favicon.ico']:
                LOG.info(('Unable to find forwarding rule for host "%s", path "%s %s", '
                          'target header "%s", auth header "%s", data "%s"') % (
                    host, method, path, target, auth_header, truncated))
        else:
            LOG.info(('Unable to determine forwarding port for API "%s" - please '
                      'make sure this API is enabled via the SERVICES configuration') % api)
        response = Response()
        response.status_code = 404
        response._content = '{"status": "running"}'
        return response
    # inject mock AWS credentials if the caller did not authenticate
    if api and not headers.get('Authorization'):
        headers['Authorization'] = aws_stack.mock_aws_request_headers(api)['Authorization']
    headers[HEADER_TARGET_API] = str(api)
    headers['Host'] = host
    if isinstance(data, dict):
        data = json.dumps(data)
    # transparently decompress gzip payloads (S3 must receive them unchanged)
    encoding_type = headers.get('Content-Encoding') or ''
    if encoding_type.upper() == GZIP_ENCODING.upper() and api not in [S3]:
        headers.set('Content-Encoding', IDENTITY_ENCODING)
        data = gzip.decompress(data)
    # hold the bootstrap lock until persistence state is restored, unless this
    # is an internal (LocalStack-originated) call
    lock_ctx = BOOTSTRAP_LOCK
    if persistence.API_CALLS_RESTORED or is_internal_call_context(headers):
        lock_ctx = empty_context_manager()
    with lock_ctx:
        return do_forward_request(api, method, path, data, headers, port=port)
async def test_form_invalid_auth(hass, caplog):
    """Test we handle invalid auth.

    Exercises three Coinbase AuthenticationError variants (unknown error,
    invalid API key, invalid signature) and asserts that each maps to the
    expected config-flow error key and debug log message.
    """
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER})
    caplog.set_level(logging.DEBUG)
    # one canned 401 response is shared by all three error objects below
    response = Response()
    response.status_code = 401
    # case 1: unrecognized auth error -> generic "invalid_auth"
    api_auth_error_unknown = AuthenticationError(
        response,
        "authentication_error",
        "unknown error",
        [{
            "id": "authentication_error",
            "message": "unknown error"
        }],
    )
    with patch(
            "coinbase.wallet.client.Client.get_current_user",
            side_effect=api_auth_error_unknown,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                CONF_API_KEY: "123456",
                CONF_API_TOKEN: "AbCDeF",
            },
        )
    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "invalid_auth"}
    assert "Coinbase rejected API credentials due to an unknown error" in caplog.text
    # case 2: invalid api key -> "invalid_auth_key"
    api_auth_error_key = AuthenticationError(
        response,
        "authentication_error",
        "invalid api key",
        [{
            "id": "authentication_error",
            "message": "invalid api key"
        }],
    )
    with patch(
            "coinbase.wallet.client.Client.get_current_user",
            side_effect=api_auth_error_key,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                CONF_API_KEY: "123456",
                CONF_API_TOKEN: "AbCDeF",
            },
        )
    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "invalid_auth_key"}
    assert "Coinbase rejected API credentials due to an invalid API key" in caplog.text
    # case 3: invalid signature -> "invalid_auth_secret"
    api_auth_error_secret = AuthenticationError(
        response,
        "authentication_error",
        "invalid signature",
        [{
            "id": "authentication_error",
            "message": "invalid signature"
        }],
    )
    with patch(
            "coinbase.wallet.client.Client.get_current_user",
            side_effect=api_auth_error_secret,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                CONF_API_KEY: "123456",
                CONF_API_TOKEN: "AbCDeF",
            },
        )
    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "invalid_auth_secret"}
    assert ("Coinbase rejected API credentials due to an invalid API secret"
            in caplog.text)
def create_search(self, query_expression):
    """Create a search for *query_expression* and report the outcome.

    Builds a requests ``Response`` describing one of three outcomes:
      * 200 - access token available and a search id was generated; the body
        carries ``{"search_id": ..., "data": {...}}``
      * 404 - token available but no search id could be generated
      * 401 - no access token could be obtained (the default)

    :param query_expression: query string passed through to build_searchId()
    :return: the populated response object

    BUG FIX: the original built ``respObj`` but fell off the end without
    returning it (the ``return ResponseWrapper(respObj)`` was commented out),
    so every caller received ``None``.
    """
    respObj = Response()
    # default to 401 until we know the access token is usable
    respObj.code = "401"
    respObj.error_type = ""
    respObj.status_code = 401
    if self.client_aux.access_token:
        self.query = query_expression
        search_id = self.build_searchId()
        if search_id is not None:
            respObj.code = "200"
            respObj.error_type = ""
            respObj.status_code = 200
            content = ('{"search_id": "' + str(search_id) +
                       '", "data": {"message": "Search id generated."}}')
            respObj._content = bytes(content, 'utf-8')
        else:
            respObj.code = "404"
            respObj.error_type = "Not found"
            respObj.status_code = 404
            respObj.message = "Could not generate search id."
    else:
        respObj.error_type = "Unauthorized: Access token could not be generated."
        respObj.message = "Unauthorized: Access token could not be generated."
    return respObj
def __init__(self, to_return):
    """Remember the canned value to hand back, then set up base Response state."""
    self.to_return = to_return
    Response.__init__(self)
def http_response_to_response(self, http_response, prepared_request):
    """
    transform a WSGIResponse into a requests's Response model
    :param django.http.response.HttpResponse|django.http.response.StreamingHttpResponse http_response:
        the http response send by django view
    :param prepared_request: the requests PreparedRequest that produced http_response
    :return: the requests's Response model corresponding to the http_response
    :rtype: Response
    """
    response = Response()
    # Fallback to None if there's no status_code, for whatever reason.
    response.status_code = getattr(http_response, 'status_code', None)
    # Make headers case-insensitive.
    response.headers = CaseInsensitiveDict(
        getattr(http_response._headers, 'headers', {}))
    # Set encoding.
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = http_response
    response.reason = response.raw.reason_phrase
    try:
        response._content = http_response.content
    except AttributeError:
        # streaming responses have no .content; drain the underlying buffer
        # and make raw.read() replay it for later consumers
        response._content = http_response.getvalue()
        response.raw.read = lambda *args: http_response.getvalue()
    req = prepared_request
    if isinstance(req.url, bytes):  # pragma: no cover
        response.url = req.url.decode('utf-8')
    else:
        response.url = req.url
    # Add new cookies from the server.
    extract_cookies_to_jar(response.cookies, req, response)
    # Give the Response some context.
    response.request = req
    response.connection = self
    return response
def google_directions_api_response_multiple_legs():
    """Fixture: a mocked HTTP 200 requests Response whose body is a canned
    Google Directions API payload containing one route with two (identical)
    legs. The body is JSON encoded to UTF-8 bytes, as requests would store it.
    """
    response = Response()
    response.status_code = 200
    response._content = json.dumps({
        "geocoded_waypoints": [{
            "geocoder_status": "OK",
            "place_id": "ChIJOTfQ9isbdkgR_-PC_VFWNUs",
            "types": ["street_address"]
        }, {
            "geocoder_status": "OK",
            "place_id": "ChIJQbcuHzwbdkgRamIMZzZGjxg",
            "types": ["cafe", "establishment", "food", "point_of_interest"]
        }],
        "routes": [{
            "bounds": {
                "northeast": {
                    "lat": 51.5193916,
                    "lng": -0.1356769
                },
                "southwest": {
                    "lat": 51.5188908,
                    "lng": -0.1369381
                }
            },
            "copyrights": "Map data ©2020 Google",
            "legs": [{
                "distance": {
                    "text": "0.1 km",
                    "value": 104
                },
                "duration": {
                    "text": "1 min",
                    "value": 44
                },
                "duration_in_traffic": {
                    "text": "1 min",
                    "value": 35
                },
                "end_address": "49 Newman St, Fitzrovia, London W1T 3DZ, UK",
                "end_location": {
                    "lat": 51.5188908,
                    "lng": -0.1369381
                },
                "start_address": "42 Charlotte St, Fitzrovia, London W1T 2NP, UK",
                "start_location": {
                    "lat": 51.5193916,
                    "lng": -0.1356769
                },
                "steps": [{
                    "distance": {
                        "text": "0.1 km",
                        "value": 104
                    },
                    "duration": {
                        "text": "1 min",
                        "value": 44
                    },
                    "end_location": {
                        "lat": 51.5188908,
                        "lng": -0.1369381
                    },
                    "html_instructions": "Head \u003cb\u003esouth-west\u003c/b\u003e on \u003cb\u003eGoodge St\u003c/b\u003e/\u003cwbr/\u003e\u003cb\u003eA5204\u003c/b\u003e towards \u003cb\u003eCharlotte Pl\u003c/b\u003e",
                    "polyline": {
                        "points": "ekmyH~nYRp@Rp@?@DLRr@Rp@L`@"
                    },
                    "start_location": {
                        "lat": 51.5193916,
                        "lng": -0.1356769
                    },
                    "travel_mode": "DRIVING"
                }],
                "traffic_speed_entry": [],
                "via_waypoint": []
            }, {
                "distance": {
                    "text": "0.1 km",
                    "value": 104
                },
                "duration": {
                    "text": "1 min",
                    "value": 44
                },
                "duration_in_traffic": {
                    "text": "1 min",
                    "value": 35
                },
                "end_address": "49 Newman St, Fitzrovia, London W1T 3DZ, UK",
                "end_location": {
                    "lat": 51.5188908,
                    "lng": -0.1369381
                },
                "start_address": "42 Charlotte St, Fitzrovia, London W1T 2NP, UK",
                "start_location": {
                    "lat": 51.5193916,
                    "lng": -0.1356769
                },
                "steps": [{
                    "distance": {
                        "text": "0.1 km",
                        "value": 104
                    },
                    "duration": {
                        "text": "1 min",
                        "value": 44
                    },
                    "end_location": {
                        "lat": 51.5188908,
                        "lng": -0.1369381
                    },
                    "html_instructions": "Head \u003cb\u003esouth-west\u003c/b\u003e on \u003cb\u003eGoodge St\u003c/b\u003e/\u003cwbr/\u003e\u003cb\u003eA5204\u003c/b\u003e towards \u003cb\u003eCharlotte Pl\u003c/b\u003e",
                    "polyline": {
                        "points": "ekmyH~nYRp@Rp@?@DLRr@Rp@L`@"
                    },
                    "start_location": {
                        "lat": 51.5193916,
                        "lng": -0.1356769
                    },
                    "travel_mode": "DRIVING"
                }],
                "traffic_speed_entry": [],
                "via_waypoint": []
            }],
            "overview_polyline": {
                "points": "ekmyH~nYbBzFblahblah"
            },
            "summary": "Goodge St/A5204",
            "warnings": [],
            "waypoint_order": []
        }],
        "status": "OK"
    }).encode('utf-8')
    return response
def forward_request(self, method, path, data, headers):
    """Entry point for requests arriving at the LocalStack edge proxy.

    Optionally forwards everything to EDGE_FORWARD_URL, otherwise resolves the
    target AWS API and backend port, normalizes auth/encoding headers, and
    dispatches to the backend under the bootstrap lock (unless the call is
    internal or persistence is already restored).

    :param method: HTTP method of the incoming request
    :param path: request path
    :param data: raw request body (bytes/str) or an already-parsed dict
    :param headers: mutable, case-insensitive header mapping (supports .set())
    :return: a Response object, an int status code, or the forwarded result
    """
    if config.EDGE_FORWARD_URL:
        return do_forward_request_network(
            0, method, path, data, headers, target_url=config.EDGE_FORWARD_URL)
    # kill the process if we receive this header
    headers.get(HEADER_KILL_SIGNAL) and sys.exit(0)
    target = headers.get("x-amz-target", "")
    auth_header = get_auth_string(method, path, headers, data)
    if auth_header and not headers.get("authorization"):
        headers["authorization"] = auth_header
    host = headers.get("host", "")
    orig_req_url = headers.pop(HEADER_LOCALSTACK_REQUEST_URL, "")
    # record the scheme://host prefix of the original request URL (fall back
    # to the Host header) so downstream listeners know the edge URL
    headers[HEADER_LOCALSTACK_EDGE_URL] = (re.sub(
        r"^([^:]+://[^/]+).*", r"\1", orig_req_url) or "http://%s" % host)
    # extract API details
    api, port, path, host = get_api_from_headers(headers, method=method, path=path, data=data)
    set_default_region_in_headers(headers)
    # negative port is a sentinel meaning "reject this request"
    if port and int(port) < 0:
        return 404
    if not port:
        # header-based detection failed - try custom path/data based rules
        api, port = get_api_from_custom_rules(method, path, data, headers) or (
            api,
            port,
        )
    should_log_trace = is_trace_logging_enabled(headers)
    if api and should_log_trace:
        # print request trace for debugging, if enabled
        LOG.debug('IN(%s): "%s %s" - headers: %s - data: %s' % (api, method, path, dict(headers), data))
    if not port:
        if method == "OPTIONS":
            if api and should_log_trace:
                # print request trace for debugging, if enabled
                LOG.debug('IN(%s): "%s %s" - status: %s' % (api, method, path, 200))
            # always accept CORS pre-flight requests
            return 200
        if api in ["", None, API_UNKNOWN]:
            truncated = truncate(data)
            # suppress the log for plain "/" or favicon probes with no payload
            if auth_header or target or data or path not in ["/", "/favicon.ico"]:
                LOG.info((
                    'Unable to find forwarding rule for host "%s", path "%s %s", '
                    'target header "%s", auth header "%s", data "%s"')
                    % (host, method, path, target, auth_header, truncated))
        else:
            LOG.info((
                'Unable to determine forwarding port for API "%s" - please '
                "make sure this API is enabled via the SERVICES configuration"
            ) % api)
        response = Response()
        response.status_code = 404
        response._content = '{"status": "running"}'
        return response
    # inject mock AWS credentials if the caller did not authenticate
    if api and not headers.get("Authorization"):
        headers["Authorization"] = aws_stack.mock_aws_request_headers(
            api)["Authorization"]
    headers[HEADER_TARGET_API] = str(api)
    headers["Host"] = host
    if isinstance(data, dict):
        data = json.dumps(data)
    # transparently decompress gzip payloads (except for APIs in SKIP_GZIP_APIS)
    encoding_type = headers.get("Content-Encoding") or ""
    if encoding_type.upper() == GZIP_ENCODING.upper() and api not in SKIP_GZIP_APIS:
        headers.set("Content-Encoding", IDENTITY_ENCODING)
        data = gzip.decompress(data)
    is_internal_call = is_internal_call_context(headers)
    self._require_service(api)
    # hold the bootstrap lock until persistence is restored, unless internal
    lock_ctx = BOOTSTRAP_LOCK
    if is_internal_call or persistence.is_persistence_restored():
        lock_ctx = empty_context_manager()
    with lock_ctx:
        result = do_forward_request(api, method, path, data, headers, port=port)
        if should_log_trace and result not in [None, False, True]:
            # result may be a Response-like object or a bare status code
            result_status_code = getattr(result, "status_code", result)
            result_headers = getattr(result, "headers", {})
            result_content = getattr(result, "content", "")
            LOG.debug(
                'OUT(%s): "%s %s" - status: %s - response headers: %s - response: %s',
                api,
                method,
                path,
                result_status_code,
                dict(result_headers or {}),
                result_content,
            )
        return result
def build_response(self, request, resp):
    """
    Builds a Requests' response object. This emulates most of the logic of
    the standard function but deals with the lack of the ``.headers``
    property on the HTTP20Response object.

    Additionally, this function builds in a number of features that are
    purely for HTTPie. This is to allow maximum compatibility with what
    urllib3 does, so that HTTPie doesn't fall over when it uses us.

    :param request: the (prepared) request that produced *resp*
    :param resp: an HTTP20Response-like object (has .status, .reason,
        .headers.iter_raw())
    :return: a populated requests Response
    """
    response = Response()
    response.status_code = resp.status
    response.headers = CaseInsensitiveDict(resp.headers.iter_raw())
    response.raw = resp
    response.reason = resp.reason
    response.encoding = get_encoding_from_headers(response.headers)
    extract_cookies_to_jar(response.cookies, request, response)
    response.url = request.url
    response.request = request
    response.connection = self
    # First horrible patch: Requests expects its raw responses to have a
    # release_conn method, which I don't. We should monkeypatch a no-op on.
    resp.release_conn = lambda: None
    # Next, add the things HTTPie needs. It needs the following things:
    #
    # - The `raw` object has a property called `_original_response` that is
    #   a `httplib` response object.
    # - `raw._original_response` has three simple properties: `version`,
    #   `status`, `reason`.
    # - `raw._original_response.version` has one of three values: `9`,
    #   `10`, `11`.
    # - `raw._original_response.msg` exists.
    # - `raw._original_response.msg._headers` exists and is an iterable of
    #   two-tuples.
    #
    # We fake this out. Most of this exists on our response object already,
    # and the rest can be faked.
    #
    # All of this exists for httpie, which I don't have any tests for,
    # so I'm not going to bother adding test coverage for it.

    class FakeOriginalResponse(object):  # pragma: no cover
        # minimal stand-in for httplib.HTTPResponse as consumed by HTTPie
        def __init__(self, headers):
            self._headers = headers

        def get_all(self, name, default=None):
            # collect all header values whose (lowercased) name matches;
            # NOTE(review): assumes stored header names are lowercase - confirm
            values = []
            for n, v in self._headers:
                if n == name.lower():
                    values.append(v)
            if not values:
                return default
            return values

        def getheaders(self, name):
            return self.get_all(name, [])

    # headers=None here: version/status/reason are patched on below, and msg
    # (which HTTPie reads headers from) is a second instance with real headers
    response.raw._original_response = orig = FakeOriginalResponse(None)
    orig.version = 20
    orig.status = resp.status
    orig.reason = resp.reason
    orig.msg = FakeOriginalResponse(resp.headers.iter_raw())
    return response
def _mock_response(response_content_filename: str) -> Response:
    """Build a requests Response whose body is loaded from a test fixture.

    :param response_content_filename: file name under tests/alerts/miscellaneous
    :return: a Response with ``_content`` set to the fixture's raw bytes

    The original opened the fixture without closing it (a file-handle leak);
    a ``with`` block now guarantees the handle is released.
    """
    api_response_mock = Response()
    fixture_path = os.path.join('tests', 'alerts', 'miscellaneous',
                                response_content_filename)
    with open(fixture_path, 'rb') as fixture_file:
        api_response_mock._content = fixture_file.read()
    return api_response_mock
def disable_sending(self, request, *args, **kwargs):
    """Short-circuit the send: hand back an empty 200 tied to *request*."""
    stub = Response()
    stub.request = request
    stub.status_code = 200
    return stub
def update_content_length(response: Response):
    """Recompute the Content-Length header from the response body, if present."""
    if not response or response.content is None:
        return
    response.headers["Content-Length"] = str(len(response.content))
def test_new(self, mock_post):
    """Auth-token request succeeds when the POST returns 200 with a token body."""
    resp = Response()
    resp.status_code = 200
    # Response.content is a read-only property in requests; populate the
    # private _content attribute instead (assigning .content directly raises
    # AttributeError), matching the other mocks in this file.
    resp._content = "authtoken_value"
    mock_post.return_value = resp
    test(self)
def return_response(self, method, path, data, headers, response):
    """Post-process Kinesis API responses from the backend.

    Fires analytics events for stream create/delete, feeds put-record payloads
    into the Lambda event-source machinery, and fakes a successful
    UpdateShardCount reply (unsupported by the kinesalite backend).

    :param method: HTTP method of the original request
    :param path: request path
    :param data: raw request body (JSON)
    :param headers: request headers (X-Amz-Target selects the action)
    :param response: the backend's response (mutated for UpdateShardCount)
    :return: a replacement Response for UpdateShardCount, otherwise None
    """
    action = headers.get('X-Amz-Target')
    data = json.loads(to_str(data))
    records = []
    if action in (ACTION_CREATE_STREAM, ACTION_DELETE_STREAM):
        # publish a usage event carrying a hash of the stream name
        event_type = (event_publisher.EVENT_KINESIS_CREATE_STREAM
                      if action == ACTION_CREATE_STREAM
                      else event_publisher.EVENT_KINESIS_DELETE_STREAM)
        payload = {'n': event_publisher.get_hash(data.get('StreamName'))}
        if action == ACTION_CREATE_STREAM:
            payload['s'] = data.get('ShardCount')
        event_publisher.fire_event(event_type, payload=payload)
    elif action == ACTION_PUT_RECORD:
        # forward the single record (with its assigned sequence number) to
        # any Lambda functions subscribed to this stream
        response_body = json.loads(to_str(response.content))
        event_record = {
            'data': data['Data'],
            'partitionKey': data['PartitionKey'],
            'sequenceNumber': response_body.get('SequenceNumber')
        }
        event_records = [event_record]
        stream_name = data['StreamName']
        lambda_api.process_kinesis_records(event_records, stream_name)
    elif action == ACTION_PUT_RECORDS:
        # same as above, but pair each request record with the sequence
        # number from the corresponding entry of the response
        event_records = []
        response_body = json.loads(to_str(response.content))
        if 'Records' in response_body:
            response_records = response_body['Records']
            records = data['Records']
            for index in range(0, len(records)):
                record = records[index]
                event_record = {
                    'data': record['Data'],
                    'partitionKey': record['PartitionKey'],
                    'sequenceNumber': response_records[index].get('SequenceNumber')
                }
                event_records.append(event_record)
            stream_name = data['StreamName']
            lambda_api.process_kinesis_records(event_records, stream_name)
    elif action == ACTION_UPDATE_SHARD_COUNT:
        # Currently kinesalite, which backs the Kinesis implementation for localstack, does
        # not support UpdateShardCount:
        # https://github.com/mhart/kinesalite/issues/61
        #
        # [Terraform](https://www.terraform.io) makes the call to UpdateShardCount when it
        # applies Kinesis resources. A Terraform run fails when this is not present.
        #
        # The code that follows just returns a successful response, bypassing the 400
        # response that kinesalite returns.
        response.status_code = 200
        content = {
            'CurrentShardCount': 1,
            'StreamName': data['StreamName'],
            'TargetShardCount': data['TargetShardCount']
        }
        response.encoding = 'UTF-8'
        response._content = json.dumps(content)
        return response
def google_directions_api_response_without_traffic_info():
    """Fixture: a mocked HTTP 200 requests Response carrying a canned Google
    Directions API payload for a single-leg route; the leg's step omits the
    ``duration_in_traffic`` field (the case under test). Body is JSON encoded
    to UTF-8 bytes, as requests would store it.
    """
    response = Response()
    response.status_code = 200
    response._content = json.dumps({
        "geocoded_waypoints": [{
            "geocoder_status": "OK",
            "place_id": "ChIJQbcuHzwbdkgRamIMZzZGjxg",
            "types": ["cafe", "establishment", "food", "point_of_interest"]
        }, {
            "geocoder_status": "OK",
            "place_id": "ChIJi8rZjSkbdkgRBluJBmAZK1w",
            "types": ["street_address"]
        }],
        "routes": [{
            "bounds": {
                "northeast": {
                    "lat": 51.5208376,
                    "lng": -0.1369381
                },
                "southwest": {
                    "lat": 51.5188908,
                    "lng": -0.1391098
                }
            },
            "copyrights": "Map data ©2020 Google",
            "legs": [{
                "distance": {
                    "text": "0.3 km",
                    "value": 264
                },
                "duration": {
                    "text": "1 min",
                    "value": 71
                },
                "end_address": "65 Cleveland St, Fitzrovia, London W1T 4JZ, UK",
                "end_location": {
                    "lat": 51.5208376,
                    "lng": -0.1391098
                },
                "start_address": "49 Newman St, Fitzrovia, London W1T 3DZ, UK",
                "start_location": {
                    "lat": 51.5188908,
                    "lng": -0.1369381
                },
                "steps": [{
                    "distance": {
                        "text": "0.3 km",
                        "value": 264
                    },
                    "duration": {
                        "text": "1 min",
                        "value": 71
                    },
                    "end_location": {
                        "lat": 51.5208376,
                        "lng": -0.1391098
                    },
                    "html_instructions": "Head \u003cb\u003enorth-west\u003c/b\u003e on \u003cb\u003eCleveland St\u003c/b\u003e towards \u003cb\u003eTottenham St\u003c/b\u003e",
                    "polyline": {
                        "points": "ahmyHzvYCBCFs@t@oAtAaAdAy@bAA@WX_AjAc@f@"
                    },
                    "start_location": {
                        "lat": 51.5188908,
                        "lng": -0.1369381
                    },
                    "travel_mode": "DRIVING"
                }],
                "traffic_speed_entry": [],
                "via_waypoint": []
            }],
            "overview_polyline": {
                "points": "ahmyHzvYkCvCuCdDcBrB"
            },
            "summary": "Cleveland St",
            "warnings": [],
            "waypoint_order": []
        }],
        "status": "OK"
    }).encode('utf-8')
    return response
def test_base_get_exception_class(self):
    """A bare Response with no error details maps to the generic exception."""
    api = CensysAPIBase("url")
    exc_cls = api._get_exception_class(Response())
    assert exc_cls == CensysAPIException
def make_accepted_response():
    """Return a bare HTTP 202 (Accepted) response."""
    accepted = Response()
    accepted.status_code = 202
    return accepted
def delete_cors(bucket_name):
    """Drop any stored CORS configuration for *bucket_name*; always ack 200."""
    # TODO: check if bucket exists, otherwise return 404-like error
    BUCKET_CORS.pop(bucket_name, {})
    result = Response()
    result.status_code = 200
    return result
def make_error_response(message, code=400):
    """Build a JSON error response carrying *message* under status *code*."""
    error = Response()
    error.status_code = code
    error._content = json.dumps({'message': message})
    return error
def forward_request(self, **kwargs):
    """Echo the request's 'data' kwarg back (or '{}' when empty) with a 200."""
    echo = Response()
    echo.status_code = 200
    echo._content = kwargs.get('data') or '{}'
    return echo
class WebRequest(object):
    """Small wrapper around ``requests`` adding a random User-Agent, basic
    headers, and a bounded retry loop; the last response is kept on
    ``self.response`` and helper properties expose it as tree/text/json."""

    name = "web_request"

    def __init__(self, *args, **kwargs):
        self.log = LogHandler(self.name, file=False)
        self.response = Response()

    @property
    def user_agent(self):
        """
        return an User-Agent at random
        :return:
        """
        ua_list = [
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71',
            'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)',
            'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50',
            'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
        ]
        return random.choice(ua_list)

    @property
    def header(self):
        """
        basic header
        :return:
        """
        return {
            'User-Agent': self.user_agent,
            'Accept': '*/*',
            'Connection': 'keep-alive',
            'Accept-Language': 'zh-CN,zh;q=0.8'
        }

    def _request(self, request_func, url, header, retry_time, retry_interval,
                 timeout, *args, **kwargs):
        """Shared retry loop for get()/post(); stores the result on
        self.response and returns self for chaining."""
        headers = self.header
        if header and isinstance(header, dict):
            headers.update(header)
        while True:
            try:
                self.response = request_func(url, headers=headers,
                                             timeout=timeout, *args, **kwargs)
                return self
            except Exception as e:
                self.log.error("requests: %s error: %s" % (url, str(e)))
                retry_time -= 1
                if retry_time <= 0:
                    resp = Response()
                    resp.status_code = 200
                    # BUG FIX: the original built this placeholder but never
                    # stored it, so self.response kept a stale earlier value
                    self.response = resp
                    return self
                self.log.info("retry %s second after" % retry_interval)
                time.sleep(retry_interval)

    def get(self, url, header=None, retry_time=3, retry_interval=5,
            timeout=5, *args, **kwargs):
        """
        get method
        :param url: target url
        :param header: headers
        :param retry_time: retry time
        :param retry_interval: retry interval
        :param timeout: network timeout
        :return:
        """
        return self._request(requests.get, url, header, retry_time,
                             retry_interval, timeout, *args, **kwargs)

    def post(self, url, header=None, retry_time=3, retry_interval=5,
             timeout=5, *args, **kwargs):
        """
        post method
        :param url: target url
        :param header: headers
        :param retry_time: retry time
        :param retry_interval: retry interval
        :param timeout: network timeout
        :return:
        """
        return self._request(requests.post, url, header, retry_time,
                             retry_interval, timeout, *args, **kwargs)

    @property
    def tree(self):
        # parsed lxml tree of the last response body
        return etree.HTML(self.response.content)

    @property
    def text(self):
        # decoded text of the last response
        return self.response.text

    @property
    def json(self):
        # parsed JSON of the last response; {} when the body is not JSON
        try:
            return self.response.json()
        except Exception as e:
            self.log.error(str(e))
            return {}
def test_delete_storage_profile(self, mock_request):
    """A 204 No Content reply means deletion completes without raising."""
    no_content = Response()
    no_content.status_code = 204
    mock_request.return_value = no_content
    self.api.delete_storage_profile()
def test_delete_webhook(self, mock_request):
    """A 204 No Content reply means webhook deletion completes without raising."""
    no_content = Response()
    no_content.status_code = 204
    mock_request.return_value = no_content
    self.api.delete_webhook()
def forward_request(self, method, path, data, headers):
    """Intercept DynamoDB API calls before they reach DynamoDBLocal.

    Handles the web shell redirect, validates table existence for several
    actions, records pre-images of items in thread-locals (consumed later in
    return_response), and fully mocks actions DynamoDBLocal does not support
    (TTL, tagging).

    :param method: HTTP method of the incoming request
    :param path: request path
    :param data: raw JSON request body
    :param headers: request headers (X-Amz-Target selects the action)
    :return: True to forward unchanged, an int status code, a Response to
        short-circuit, or a Request with a modified payload
    """
    # serve/redirect the DynamoDB web shell; let other GETs pass through
    if path.startswith('/shell') or method == 'GET':
        if path == '/shell':
            headers = {
                'Refresh': '0; url=%s/shell/' % config.TEST_DYNAMODB_URL
            }
            return aws_responses.requests_response('', headers=headers)
        return True
    if method == 'OPTIONS':
        return 200
    if not data:
        data = '{}'
    data = json.loads(to_str(data))
    ddb_client = aws_stack.connect_to_service('dynamodb')
    action = headers.get('X-Amz-Target')
    if self.should_throttle(action):
        return self.error_response_throughput()
    # reset the per-thread pre-image; set below for item-mutating actions
    ProxyListenerDynamoDB.thread_local.existing_item = None
    if action == '%s.CreateTable' % ACTION_PREFIX:
        # Check if table exists, to avoid error log output from DynamoDBLocal
        if self.table_exists(ddb_client, data['TableName']):
            return error_response(message='Table already created',
                                  error_type='ResourceInUseException',
                                  code=400)
    if action == '%s.CreateGlobalTable' % ACTION_PREFIX:
        return create_global_table(data)
    elif action == '%s.DescribeGlobalTable' % ACTION_PREFIX:
        return describe_global_table(data)
    elif action == '%s.ListGlobalTables' % ACTION_PREFIX:
        return list_global_tables(data)
    elif action == '%s.UpdateGlobalTable' % ACTION_PREFIX:
        return update_global_table(data)
    elif action in ('%s.PutItem' % ACTION_PREFIX, '%s.UpdateItem' % ACTION_PREFIX,
                    '%s.DeleteItem' % ACTION_PREFIX):
        # find an existing item and store it in a thread-local, so we can access it in return_response,
        # in order to determine whether an item already existed (MODIFY) or not (INSERT)
        try:
            ProxyListenerDynamoDB.thread_local.existing_item = find_existing_item(data)
        except Exception as e:
            if 'ResourceNotFoundException' in str(e):
                return get_table_not_found_error()
            raise
        # Fix incorrect values if ReturnValues==ALL_OLD and ReturnConsumedCapacity is
        # empty, see https://github.com/localstack/localstack/issues/2049
        if ((data.get('ReturnValues') == 'ALL_OLD') or (not data.get('ReturnValues'))) \
                and not data.get('ReturnConsumedCapacity'):
            data['ReturnConsumedCapacity'] = 'TOTAL'
            return Request(data=json.dumps(data), method=method, headers=headers)
    elif action == '%s.DescribeTable' % ACTION_PREFIX:
        # Check if table exists, to avoid error log output from DynamoDBLocal
        if not self.table_exists(ddb_client, data['TableName']):
            return get_table_not_found_error()
    elif action == '%s.DeleteTable' % ACTION_PREFIX:
        # Check if table exists, to avoid error log output from DynamoDBLocal
        if not self.table_exists(ddb_client, data['TableName']):
            return get_table_not_found_error()
    elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
        # capture pre-images for every put/delete request across all tables
        existing_items = []
        for table_name in sorted(data['RequestItems'].keys()):
            for request in data['RequestItems'][table_name]:
                for key in ['PutRequest', 'DeleteRequest']:
                    inner_request = request.get(key)
                    if inner_request:
                        existing_items.append(
                            find_existing_item(inner_request, table_name))
        ProxyListenerDynamoDB.thread_local.existing_items = existing_items
    elif action == '%s.Query' % ACTION_PREFIX:
        if data.get('IndexName'):
            # reject Select=ALL_ATTRIBUTES queries on indexes without an
            # ALL projection, mirroring real DynamoDB validation
            if not is_index_query_valid(to_str(data['TableName']), data.get('Select')):
                return error_response(
                    message=
                    'One or more parameter values were invalid: Select type '
                    'ALL_ATTRIBUTES is not supported for global secondary index id-index '
                    'because its projection type is not ALL',
                    error_type='ValidationException',
                    code=400)
    elif action == '%s.TransactWriteItems' % ACTION_PREFIX:
        # capture pre-images for every put/update/delete in the transaction
        existing_items = []
        for item in data['TransactItems']:
            for key in ['Put', 'Update', 'Delete']:
                inner_item = item.get(key)
                if inner_item:
                    existing_items.append(find_existing_item(inner_item))
        ProxyListenerDynamoDB.thread_local.existing_items = existing_items
    elif action == '%s.UpdateTimeToLive' % ACTION_PREFIX:
        # TODO: TTL status is maintained/mocked but no real expiry is happening for items
        response = Response()
        response.status_code = 200
        self._table_ttl_map[data['TableName']] = {
            'AttributeName': data['TimeToLiveSpecification']['AttributeName'],
            'Status': data['TimeToLiveSpecification']['Enabled']
        }
        response._content = json.dumps(
            {'TimeToLiveSpecification': data['TimeToLiveSpecification']})
        fix_headers_for_updated_response(response)
        return response
    elif action == '%s.DescribeTimeToLive' % ACTION_PREFIX:
        response = Response()
        response.status_code = 200
        if data['TableName'] in self._table_ttl_map:
            if self._table_ttl_map[data['TableName']]['Status']:
                ttl_status = 'ENABLED'
            else:
                ttl_status = 'DISABLED'
            response._content = json.dumps({
                'TimeToLiveDescription': {
                    'AttributeName':
                    self._table_ttl_map[data['TableName']]['AttributeName'],
                    'TimeToLiveStatus': ttl_status
                }
            })
        else:
            # TTL for dynamodb table not set
            response._content = json.dumps({
                'TimeToLiveDescription': {
                    'TimeToLiveStatus': 'DISABLED'
                }
            })
        fix_headers_for_updated_response(response)
        return response
    elif action == '%s.TagResource' % ACTION_PREFIX or action == '%s.UntagResource' % ACTION_PREFIX:
        response = Response()
        response.status_code = 200
        response._content = ''  # returns an empty body on success.
        fix_headers_for_updated_response(response)
        return response
    elif action == '%s.ListTagsOfResource' % ACTION_PREFIX:
        response = Response()
        response.status_code = 200
        response._content = json.dumps({
            'Tags': [{
                'Key': k,
                'Value': v
            } for k, v in TABLE_TAGS.get(data['ResourceArn'], {}).items()]
        })
        fix_headers_for_updated_response(response)
        return response
    # default: forward the request to DynamoDBLocal unchanged
    return True
def test_delete_document(self, mock_request):
    """A 204 No Content reply means document deletion completes without raising."""
    no_content = Response()
    no_content.status_code = 204
    mock_request.return_value = no_content
    self.api.delete_document(TEST_DOCUMENT['id'])
def modify_and_forward(
    method=None,
    path=None,
    data_bytes=None,
    headers=None,
    forward_base_url=None,
    listeners=None,
    request_handler=None,
    client_address=None,
    server_address=None,
):
    """This is the central function that coordinates the incoming/outgoing
    messages with the proxy listeners (message interceptors).

    Flow: CORS origin check -> pre-invocation listeners (which may short-
    circuit with a Response/status or rewrite the Request) -> actual backend
    invocation -> post-invocation listener on the last listener -> default
    CORS headers (unless the S3 listener is involved).
    """
    # Check origin / referer header before anything else happens.
    if (not config.DISABLE_CORS_CHECKS
            and should_enforce_self_managed_service(method, path, headers, data_bytes)
            and not is_cors_origin_allowed(headers)):
        LOG.info(
            "Blocked CORS request from forbidden origin %s",
            headers.get("origin") or headers.get("referer"),
        )
        return cors_error_response()
    listeners = ProxyListener.DEFAULT_LISTENERS + (listeners or [])
    listeners = [lis for lis in listeners if lis]
    data = data_bytes

    def is_full_url(url):
        # true when url carries its own scheme://host prefix
        return re.match(r"[a-zA-Z]+://.+", url)

    # strip scheme://host if the "path" is actually a full URL
    if is_full_url(path):
        path = path.split("://", 1)[1]
        path = "/%s" % (path.split("/", 1)[1] if "/" in path else "")
    proxy_url = "%s%s" % (forward_base_url, path)
    # listeners may redirect the backend URL entirely
    for listener in listeners:
        proxy_url = listener.get_forward_url(method, path, data, headers) or proxy_url
    target_url = path
    if not is_full_url(target_url):
        target_url = "%s%s" % (forward_base_url, target_url)
    # update original "Host" header (moto s3 relies on this behavior)
    if not headers.get("Host"):
        headers["host"] = urlparse(target_url).netloc
    headers["X-Forwarded-For"] = build_x_forwarded_for(headers, client_address,
                                                      server_address)
    response = None
    modified_request = None
    # update listener (pre-invocation)
    for listener in listeners:
        try:
            listener_result = listener.forward_request(method=method,
                                                       path=path,
                                                       data=data,
                                                       headers=headers)
        except HTTPException as e:
            # TODO: implement properly using exception handlers
            return http_exception_to_response(e)
        if isinstance(listener_result, Response):
            # listener produced a full response - skip the backend call
            response = listener_result
            break
        if isinstance(listener_result, LambdaResponse):
            response = listener_result
            break
        if isinstance(listener_result, dict):
            # dict results are serialized as a JSON 200 response
            response = Response()
            response._content = json.dumps(json_safe(listener_result))
            response.headers["Content-Type"] = APPLICATION_JSON
            response.status_code = 200
            break
        elif isinstance(listener_result, Request):
            # listener rewrote the request; use its data/headers downstream
            modified_request = listener_result
            data = modified_request.data
            headers = modified_request.headers
            break
        elif http2_server.get_async_generator_result(listener_result):
            return listener_result
        elif listener_result is not True:
            # get status code from response, or use Bad Gateway status code
            code = listener_result if isinstance(listener_result, int) else 503
            response = Response()
            response.status_code = code
            response._content = ""
            response.headers["Content-Length"] = "0"
            append_cors_headers(request_headers=headers, response=response)
            return response
    # perform the actual invocation of the backend service
    if response is None:
        headers["Connection"] = headers.get("Connection") or "close"
        data_to_send = data_bytes
        request_url = proxy_url
        if modified_request:
            if modified_request.url:
                request_url = "%s%s" % (forward_base_url, modified_request.url)
            data_to_send = modified_request.data
        # make sure we drop "chunked" transfer encoding from the headers to be forwarded
        headers.pop("Transfer-Encoding", None)
        response = requests.request(method,
                                    request_url,
                                    data=data_to_send,
                                    headers=headers,
                                    stream=True,
                                    verify=False)
    # prevent requests from processing response body (e.g., to pass-through gzip encoded content unmodified)
    pass_raw = (hasattr(response, "_content_consumed")
                and not response._content_consumed
                ) or response.headers.get("content-encoding") in ["gzip"]
    if pass_raw and getattr(response, "raw", None):
        new_content = response.raw.read()
        if new_content:
            response._content = new_content
    # update listener (post-invocation)
    if listeners:
        update_listener = listeners[-1]
        kwargs = {
            "method": method,
            "path": path,
            "data": data_bytes,
            "headers": headers,
            "response": response,
        }
        if "request_handler" in inspect.getfullargspec(
                update_listener.return_response).args:
            # some listeners (e.g., sqs_listener.py) require additional details like the original
            # request port, hence we pass in a reference to this request handler as well.
            kwargs["request_handler"] = request_handler
        updated_response = update_listener.return_response(**kwargs)
        if isinstance(updated_response, Response):
            response = updated_response
    # allow pre-flight CORS headers by default
    from localstack.services.s3.s3_listener import ProxyListenerS3
    is_s3_listener = any([
        isinstance(service_listener, ProxyListenerS3)
        for service_listener in listeners
    ])
    if not is_s3_listener:
        append_cors_headers(request_headers=headers, response=response)
    return response
def __init__(self, *args, **kwargs):
    """Set up per-instance state: a pre-allocated HTTP response and a logger.

    Extra positional/keyword arguments are accepted (and ignored here) for
    compatibility with cooperative base-class constructors.
    """
    # Fresh requests.Response object that handler methods populate later.
    self.response = Response()
    # Non-file logger (file=False); the logger name comes from the class-level
    # `name` attribute — presumably set by the concrete subclass (TODO confirm).
    self.log = LogHandler(self.name, file=False)
def invoke_rest_api(api_id, stage, method, invocation_path, data, headers, path=None):
    """Invoke the integration configured for an API Gateway resource.

    Resolves `invocation_path` against the REST API's path map, then dispatches
    to the matching integration type:
      * ``AWS``       - templated forwarding to local Kinesis or SQS backends
      * ``AWS_PROXY`` - Lambda proxy invocation; the Lambda result is converted
                        into a ``requests.Response``
      * ``HTTP``      - plain pass-through via the ``requests`` library

    :param api_id: ID of the REST API to invoke.
    :param stage: deployment stage name (propagated into the Lambda request context).
    :param method: HTTP method of the incoming request.
    :param invocation_path: request path (may include a query string).
    :param data: request payload (dict/list payloads are JSON-serialized where needed).
    :param headers: incoming request headers (dict-like).
    :param path: optional display path used in error messages; defaults to
        `invocation_path`.
    :return: a ``requests.Response`` (or Flask-converted response) for handled
        integrations, an error response for unknown paths/integrations, or the
        raw backend result for templated forwards.
    """
    path = path or invocation_path
    relative_path, query_string_params = extract_query_string_params(path=invocation_path)
    path_map = helpers.get_rest_api_paths(rest_api_id=api_id)
    try:
        extracted_path, resource = get_resource_for_path(path=relative_path, path_map=path_map)
    except Exception:
        return make_error_response('Unable to find path %s' % path, 404)

    # Look up the method integration; fall back to the catch-all 'ANY' method.
    integrations = resource.get('resourceMethods', {})
    integration = integrations.get(method, {})
    if not integration:
        integration = integrations.get('ANY', {})
    integration = integration.get('methodIntegration')
    if not integration:
        if method == 'OPTIONS' and 'Origin' in headers:
            # default to returning CORS headers if this is an OPTIONS request
            return get_cors_response(headers)
        return make_error_response('Unable to find integration for path %s' % path, 404)

    uri = integration.get('uri')
    if integration['type'] == 'AWS':
        if 'kinesis:action/' in uri:
            # Map the Kinesis action in the URI to the local listener target.
            # Bug fix: previously `target` was left unbound (NameError) for any
            # unrecognized kinesis action; now we fall through to the generic
            # "not yet implemented" error below.
            target = None
            if uri.endswith('kinesis:action/PutRecords'):
                target = kinesis_listener.ACTION_PUT_RECORDS
            elif uri.endswith('kinesis:action/ListStreams'):
                target = kinesis_listener.ACTION_LIST_STREAMS
            if target is not None:
                template = integration['requestTemplates'][APPLICATION_JSON]
                new_request = aws_stack.render_velocity_template(template, data)
                # forward records to target kinesis stream
                headers = aws_stack.mock_aws_request_headers(service='kinesis')
                headers['X-Amz-Target'] = target
                result = common.make_http_request(url=TEST_KINESIS_URL,
                    method='POST', data=new_request, headers=headers)
                return result
        if method == 'POST':
            if uri.startswith('arn:aws:apigateway:') and ':sqs:path' in uri:
                # Render the request template, append the queue name, and
                # forward to the local SQS endpoint.
                template = integration['requestTemplates'][APPLICATION_JSON]
                account_id, queue = uri.split('/')[-2:]
                region_name = uri.split(':')[3]
                new_request = aws_stack.render_velocity_template(
                    template, data) + '&QueueName=%s' % queue
                headers = aws_stack.mock_aws_request_headers(
                    service='sqs', region_name=region_name)
                url = urljoin(TEST_SQS_URL, '%s/%s' % (account_id, queue))
                result = common.make_http_request(url, method='POST',
                    headers=headers, data=new_request)
                return result
        msg = 'API Gateway AWS integration action URI "%s", method "%s" not yet implemented' % (
            uri, method)
        LOGGER.warning(msg)
        return make_error_response(msg, 404)
    elif integration['type'] == 'AWS_PROXY':
        if uri.startswith('arn:aws:apigateway:') and ':lambda:path' in uri:
            func_arn = uri.split(':lambda:path')[1].split(
                'functions/')[1].split('/invocations')[0]
            data_str = json.dumps(data) if isinstance(data, (dict, list)) else data
            account_id = uri.split(':lambda:path')[1].split(
                ':function:')[0].split(':')[-1]

            # Determine the client source IP from X-Forwarded-For. The proxy
            # appends its own address last, so the client IP is the
            # second-to-last entry when more than one is present.
            # Bug fix: previously this raised KeyError when the header was
            # missing and IndexError when it held fewer than two entries.
            forwarded_for = [ip.strip() for ip
                             in headers.get('X-Forwarded-For', '').split(',') if ip.strip()]
            if len(forwarded_for) > 1:
                source_ip = forwarded_for[-2]
            else:
                source_ip = forwarded_for[0] if forwarded_for else ''

            # Sample request context:
            # https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-create-api-as-simple-proxy-for-lambda.html#api-gateway-create-api-as-simple-proxy-for-lambda-test
            request_context = {
                'path': relative_path,
                'accountId': account_id,
                'resourceId': resource.get('id'),
                'stage': stage,
                'identity': {
                    'accountId': account_id,
                    'sourceIp': source_ip,
                    # .get(): don't crash if the client sent no User-Agent
                    'userAgent': headers.get('User-Agent'),
                }
            }
            try:
                path_params = extract_path_params(
                    path=relative_path, extracted_path=extracted_path)
            except Exception:
                path_params = {}
            result = lambda_api.process_apigateway_invocation(
                func_arn, relative_path, data_str, headers,
                path_params=path_params, query_string_params=query_string_params,
                method=method, resource_path=path, request_context=request_context)
            if isinstance(result, FlaskResponse):
                return flask_to_requests_response(result)
            if isinstance(result, Response):
                return result

            # Convert the Lambda result (dict or JSON string) into a Response.
            response = Response()
            parsed_result = result if isinstance(result, dict) else json.loads(result)
            parsed_result = common.json_safe(parsed_result)
            parsed_result = {} if parsed_result is None else parsed_result
            response.status_code = int(parsed_result.get('statusCode', 200))
            response.headers.update(parsed_result.get('headers', {}))
            try:
                if isinstance(parsed_result['body'], dict):
                    response._content = json.dumps(parsed_result['body'])
                else:
                    response._content = to_bytes(parsed_result['body'])
            except Exception:
                # no (usable) body in the Lambda result -> empty JSON object
                response._content = '{}'
            # Bug fix: header values must be strings, not ints.
            response.headers['Content-Length'] = str(len(response._content))
            return response
        else:
            msg = 'API Gateway action uri "%s" not yet implemented' % uri
            LOGGER.warning(msg)
            return make_error_response(msg, 404)
    elif integration['type'] == 'HTTP':
        # Plain HTTP pass-through: pick the matching requests.<method> function.
        function = getattr(requests, method.lower())
        if isinstance(data, dict):
            data = json.dumps(data)
        result = function(integration['uri'], data=data, headers=headers)
        return result
    else:
        msg = ('API Gateway integration type "%s" for method "%s" not yet implemented' %
               (integration['type'], method))
        LOGGER.warning(msg)
        return make_error_response(msg, 404)
    return 200