def _send_xml(self, msg, context, diagnose):
    """Submit an XML message to the Google Checkout order-processing endpoint.

    Args:
        msg: XML payload (string) to POST.
        context: mutable request context; ``.diagnose`` and ``.url`` are set here.
        diagnose: whether to target the diagnose (sandbox) endpoint.

    Returns:
        The response body on HTTP 200.

    Raises:
        SystemError: wrapping any urlfetch failure or non-200 status.
    """
    context.diagnose = diagnose
    url = self.get_order_processing_url(diagnose)
    context.url = url
    headers = {
        'Authorization': 'Basic %s' % (
            b64encode('%s:%s' % (self.vendor_id, self.merchant_key)), ),
        'Content-Type': ' application/xml; charset=UTF-8',
        'Accept': ' application/xml; charset=UTF-8'
    }
    ### assume this is working on Google App Engine
    try:
        from google.appengine.api import urlfetch
        self.__call_handler('on_xml_sending', context=context)
        response = urlfetch.fetch(url, msg, urlfetch.POST, headers)
        self.__call_handler('on_xml_sent', context=context)
        if response.status_code == 200:
            return response.content
        else:
            # Non-200 is funneled into the same error path as fetch failures.
            raise urlfetch.Error()
    # BUG FIX: original read 'except urlfetch.Error(), e:' which (a) used the
    # removed Python 2 comma syntax and (b) caught an exception *instance*
    # instead of the class, so the handler never matched. 'as e' works on
    # Python 2.6+ and 3.x alike.
    except urlfetch.Error as e:
        # NOTE(review): builtin SystemError takes no keyword args; this
        # presumably refers to a project-defined SystemError — confirm.
        raise SystemError(message='Error in urlfetch.fetch',
                          context=context, origin=e)
def testRetries(self, mock_fetch):
    """Retry behavior: urlfetch errors are retried, other exceptions are not."""
    # A persistent urlfetch.Error exhausts the retries and propagates.
    mock_fetch.side_effect = urlfetch.Error()
    with self.assertRaises(urlfetch.Error):
        gitiles_service.FileContents('repo', 'commit_hash', 'path')
    # One transient urlfetch.Error is retried and the second attempt succeeds.
    success = mock.MagicMock(content='aGVsbG8=', status_code=200)
    mock_fetch.side_effect = urlfetch.Error(), success
    self.assertEqual(
        gitiles_service.FileContents('repo', 'commit_hash', 'path'), 'hello')
    # Non-urlfetch exceptions are not retried even when a retry would succeed.
    mock_fetch.side_effect = Exception(), mock.MagicMock(
        content='aGVsbG8=', status_code=200)
    with self.assertRaises(Exception):
        gitiles_service.FileContents('repo', 'commit_hash', 'path')
def test_retries_transient_errors(self):
    """net.request keeps retrying through transient failures up to max_attempts."""
    url = 'http://localhost/123'
    # One simulated outcome per attempt: exception, 408, 500, then success.
    outcomes = [
        urlfetch.Error(),
        Response(408, 'clien timeout'),
        Response(500, 'server error'),
        Response(200, 'response body'),
    ]
    self.mock_urlfetch([({'url': url}, outcome) for outcome in outcomes])
    response = net.request(url, max_attempts=4)
    self.assertEqual('response body', response)
def mock_urlfetch_fetch(self, url, method=None, payload=None,
                        follow_redirects=None):
    """Override of urlfetch.fetch; dispatches to MessageCatcher.get/post.

    Rejects non-HTTPS URLs; any method other than GET/POST falls through
    and returns None (as in the original).
    """
    if not url.startswith('https://'):
        raise urlfetch.Error('Malformed URL')
    if method == 'GET':
        return MessageCatcher.get()
    if method == 'POST':
        return MessageCatcher.post(urlparse.parse_qs(payload))
def _RequestAndProcessHttpErrors(url):
    """Requests a URL, converting HTTP errors to Python exceptions."""
    response = urlfetch.fetch(url)
    status = response.status_code
    detail = 'Server returned HTTP code %d for %s' % (status, url)
    if status == 404:
        raise NotFoundError(detail)
    if status != 200:
        # Any other non-success status becomes a generic urlfetch error.
        raise urlfetch.Error(detail)
    return response.content
def test_call_async_transient_error(self):
    """A 500 and then a urlfetch.Error are retried; the final 200 is returned."""
    def expected_call(response):
        # All three attempts share the same request shape; only the
        # simulated response differs.
        return {
            'url': 'http://example.com',
            'payload': 'blah',
            'method': 'POST',
            'headers': {'A': 'a'},
            'response': response,
        }
    calls = self.mock_urlfetch([
        expected_call((500, {'error': 'zzz'})),
        expected_call(urlfetch.Error('blah')),
        expected_call((200, {'abc': 'def'})),
    ])
    response = service_account._call_async(
        url='http://example.com', payload='blah', method='POST',
        headers={'A': 'a'}).get_result()
    self.assertEqual({'abc': 'def'}, response)
    # All queued mock calls must have been consumed.
    self.assertFalse(calls)
def _FetchWithRedirects(url, redirect_attempts=_REDIRECT_ATTEMPTS, **kwargs):
    """Fetch *url*, following HTTP 302 redirects up to redirect_attempts times.

    Raises urlfetch.Error('Too many redirects') once the budget is exhausted;
    fetch errors are logged and re-raised unchanged.
    """
    # Iterative form of the original recursion: each 302 consumes one attempt.
    while True:
        if not redirect_attempts:
            raise urlfetch.Error('Too many redirects')
        logging.debug('Fetching with %s redirects remaining: %s',
                      redirect_attempts, url)
        try:
            response = urlfetch.fetch(url, **kwargs)
        except urlfetch.Error:
            logging.exception('Error encountered while submitting request to %s',
                              url)
            raise
        if response.status_code != httplib.FOUND:
            return response
        url = response.headers.get('Location')
        redirect_attempts -= 1
def get_prs_from_github(token, repo):
    """Return all pull requests for *repo*, paging via GitHub 'Link' headers.

    A 404 (deleted repo) yields an empty list; any other non-200 status
    raises urlfetch.Error.
    """
    headers = {'Authorization': 'token %s' % token}
    url = PULL_API % repo
    prs = []
    next_page = re.compile(r'<([^>]+)>; rel="next"')
    while True:
        logging.info('fetching %s', url)
        response = urlfetch.fetch(url, headers=headers)
        status = response.status_code
        if status == 404:
            logging.warning('repo was deleted?')
            # Returning no open PRs will make us fake a close event for each of
            # them, which is appropriate.
            return []
        if status != 200:
            raise urlfetch.Error('status code %s' % status)
        prs.extend(json.loads(response.content))
        match = next_page.search(response.headers.get('Link', ''))
        if not match:
            break
        url = match.group(1)
    logging.info('pr count: %d, github tokens left: %s', len(prs),
                 response.headers.get('x-ratelimit-remaining'))
    return prs
def get(self):
    """Run the user's query against each selected dictionary and render results.

    Builds an HTML fragment with one section (separated by <hr />) per
    dictionary named in the 'ordasofn' request parameter, then renders the
    search-input template with the combined markup.
    """
    query = self.getSearchString()
    exact = self.getExact()
    if query != '':
        valinOrdasofn = self.request.get("ordasofn", allow_multiple=True)
        html = '<hr />'
        for ordasafn in valinOrdasofn:
            # SECURITY(review): 'ordasafn' (and 'query'/'exact') come straight
            # from the request and are interpolated into an eval() call —
            # arbitrary code execution. This should become an explicit
            # registry/whitelist lookup of dictionary classes; flagged rather
            # than rewritten here because the class objects are not visible
            # in this chunk.
            classMethodExecution = ''.join([ordasafn, '.doSearch(\'', query, '\', \'' + exact + '\')'])
            try:
                oneSearchResult = eval(classMethodExecution)
            # BUG FIX: original read 'except urlfetch.Error():' which caught
            # an exception *instance*, so the handler could never match and
            # failed at runtime; the class itself must be named here.
            except urlfetch.Error:
                oneSearchResult = "<h2>Leitarniðurstöður skiluðu sér ekki</h2>"
                # NOTE(review): this 'continue' skips the append below, so the
                # error message assigned above is never shown — the 'continue'
                # was likely meant to be dropped. Behavior preserved as-is.
                continue
            html = ''.join([html, oneSearchResult, "<hr />"])
    else:
        valinOrdasofn = []
        html = ''
    template_values = {
        'query': query,
        'valinOrdasofn': valinOrdasofn,
        'searchResults': html}
    path = os.path.join(os.path.dirname(__file__), 'search-input.html')
    self.response.out.write(template.render(path, template_values))
def get(self):
    """JSON/JSONP endpoint returning (cached) MyAnimeList data for one anime id.

    Lookup order: memcache -> datastore (if updated within 24h) -> MAL API
    -> MAL site scrape (one retry). Successful fetches are re-stored and
    re-cached. Responds with {'ok': bool, 'result': dict-or-None}, optionally
    wrapped in a JSONP callback.
    """
    # NOTE(review): 'id' shadows the builtin; kept as-is (doc-only edit).
    id = cgi.escape(self.request.get('id'))
    callback = cgi.escape(self.request.get('callback'))
    reset = cgi.escape(self.request.get('_reset'))  # any non-empty value forces a refresh
    response = {'ok': True, 'result': None}
    if re.match(r"^\d+$", id):
        content = memcache.get(id)
        if content is not None and not reset:
            response['result'] = content
        else:
            q = db.GqlQuery('select * from AnimeV1 where id = :1', id)
            result = q.get()
            originalScore = None
            if result is not None:
                originalScore = result.score
            # A datastore entry is considered fresh for 24 hours.
            if result is not None and (datetime.now() - result.updated_datetime <= timedelta(hours=24)):
                content = {
                    'id': result.id,
                    'title': result.title,
                    'image': result.image,
                    'score': result.score,
                    'episodes': result.episodes,
                    'genres': result.genres
                }
                response['result'] = content
                memcache.set(id, content, 43200)  # cache for 12 hours
            else:
                try:
                    # Primary source: the MAL API endpoint.
                    logging.info('Fetching ' + MALAPI + id)
                    result = urlfetch.fetch(MALAPI + id, deadline = 10)
                    logging.info(result.status_code)
                    if result.status_code == 200:
                        content = formatResponse(result.content)
                        response['result'] = content
                        storeAnimeV1(id, content)
                        if originalScore is not None and content['score'] != originalScore:
                            logging.info('Anime Score Change: ' + content['title'] + ' ' + id + ': ' + str(originalScore) + ' -> ' + str(content['score']))
                    else:
                        # Funnel any non-200 into the retry path below.
                        raise urlfetch.Error()
                except urlfetch.Error:
                    # Try one more time before giving up
                    try:
                        # Fallback source: scrape the MAL site page directly.
                        logging.info('Fetching ' + MALSITE + id)
                        result = urlfetch.fetch(MALSITE + id, deadline = 10, allow_truncated = True)
                        logging.info(result.content)
                        if result.status_code == 200:
                            # True flag: parse as scraped-page markup.
                            content = formatResponse(result.content, True)
                            if content is None:
                                raise urlfetch.Error()
                                return  # NOTE(review): unreachable (follows raise) — dead code in the original
                            response['result'] = content
                            storeAnimeV1(id, content)
                            if originalScore is not None and content['score'] != originalScore:
                                logging.info('Anime Score Change: ' + content['title'] + ' ' + id + ': ' + str(originalScore) + ' -> ' + str(content['score']))
                        else:
                            raise urlfetch.Error()
                    except urlfetch.Error:
                        # Both sources failed; report failure to the client.
                        response['ok'] = False
    else:
        # Non-numeric (or empty) id is rejected outright.
        response['ok'] = False
    jsonData = json.dumps(response, sort_keys=True)
    # JSONP: only wrap when the callback is a plain JS identifier.
    if callback and re.match(r'^[A-Za-z_$][A-Za-z0-9_$]*?$', callback):
        jsonData = callback + '(' + jsonData + ')'
    # NOTE(review): reconstructed from collapsed source — the header
    # assignments below are assumed conditional on a successful, non-empty
    # result; confirm against the original formatting.
    if response['ok'] is True and response['result'] is not None:
        self.response.headers['Cache-Control'] = 'public; max-age=43200'
        self.response.headers['Content-Type'] = 'application/javascript; charset=utf-8'
        self.response.headers['Vary'] = 'Accept-Encoding'
        self.response.headers['Proxy-Connection'] = 'Keep-Alive'
        self.response.headers['Connection'] = 'Keep-Alive'
        self.response.headers['Access-Control-Allow-Origin'] = '*'
    self.response.out.write(jsonData)