def consultar(self, pregunta, id):
    """Estimate a reply for *pregunta* from user *id*.

    Lower-cases the first character of the message, runs the async
    handler on a fresh event loop, and maps any URLError onto an
    HTTPError (400 for timeouts / upstream 400s, 500 otherwise).

    Returns:
        (resp, output, tracker, idioma) as produced by handle_message_async.

    Raises:
        urllib.error.HTTPError: on any URLError from the handler.
    """
    mensaje = pregunta[0].lower() + pregunta[1:]
    payload = {'sender': id, 'message': mensaje}

    # NOTE(review): a brand-new loop is created per call and closed in
    # finally, but the previously-installed loop is not restored — confirm
    # callers never rely on asyncio.get_event_loop() afterwards.
    bucle = asyncio.new_event_loop()
    asyncio.set_event_loop(bucle)
    try:
        resp, output, tracker, idioma = bucle.run_until_complete(
            self.handle_message_async(payload))
    except error.URLError as exc:
        mantenimiento = ("Janet se encuentra en mantenimiento en estos "
                         "momentos. Inténtelo de nuevo más tarde")
        if isinstance(exc.reason, timeout):
            raise error.HTTPError(self._url, 400, mantenimiento, None, None)
        if getattr(exc, 'code', None) == 400:
            raise error.HTTPError(self._url, 400, mantenimiento, None, None)
        raise error.HTTPError(self._url, 500, exc.reason, None, None)
    finally:
        bucle.close()
    return resp, output, tracker, idioma
def mocked_urlopen_valid_not_in_forge(*args, **kwargs):
    """Fake urlopen: anaconda.org API calls get an empty reply carrying an
    HTTPError; every other url gets the canned hub reply."""
    target = args[0]
    if "https://api.anaconda.org" not in target:
        return FakeResponse(data=HUB_REPLY)
    return FakeResponse(
        data=ANACONDA_REPLY_EMPTY,
        _error=error.HTTPError('', 1, '', '', None),
    )
def get_images_from_url(url: str) -> list:
    """Fetch *url* and return its body split into lines.

    Returns:
        The decoded response body split on newlines, or None when the
        request failed with an HTTP error.
    """
    images_list = None
    try:
        # urlopen itself is what raises HTTPError, so it must be inside the
        # try block (the original opened it outside and could never catch).
        with request.urlopen(url) as resp:
            # Decode the bytes; the original str(resp.read()) produced the
            # "b'...'" repr and split on the two-character sequence "\\n".
            images_list = resp.read().decode("utf-8").split("\n")
    except error.HTTPError:
        # The original wrote `except error.HTTPError(url):`, which
        # instantiates the exception (with the wrong arity) instead of
        # naming the class, and would itself raise TypeError.
        print("Bad request")
    return images_list
def test_request_retries(self):
    """make_request must call the opener exactly max_retries times when it
    keeps failing with a 503."""
    retries = 5
    failure = error.HTTPError('some url', 503, 'some msg', '', BytesIO())
    self.mock_open = MagicMock(side_effect=failure)
    request.make_request('http://fauxurl.com',
                         max_retries=retries,
                         open_func=self.mock_open)
    self.assertEqual(self.mock_open.call_count, retries)
def test_error_handler_closing_error(self):
    """error_handler must close the HTTPError body when it raises OAuthException."""
    # HTTPError's signature is (url, code, msg, hdrs, fp); the original
    # passed ('url', 'GET', 400, ...), putting the string 'GET' in `code`
    # and the status in `msg`.
    http_err = error.HTTPError('url', 400, 'GET', {}, io.BytesIO())
    self.assertFalse(http_err.closed)
    # The original try/except silently passed when no OAuthException was
    # raised, so the closing behavior was never actually asserted.
    with self.assertRaises(OAuthException):
        self.gateway.error_handler(http_err)
    self.assertTrue(http_err.closed)
def open(self, url, conn_timeout=None):
    """Fake opener used in tests.

    A zero conn_timeout simulates a deadline miss (URLError); http urls mark
    self.opened; a configured self.error raises an HTTPError; otherwise a
    canned addinfourl response with self.rv / self.code is returned.
    """
    if conn_timeout == 0:
        raise urllib_error.URLError('Could not reach %s within deadline.' % url)
    if url.startswith('http'):
        self.opened.set()
    if self.error:
        raise urllib_error.HTTPError(url, self.error, None, None,
                                     Compatibility.BytesIO(b'glhglhg'))
    # NOTE(review): addinfourl's signature is (fp, headers, url, code); here
    # the url is passed in the headers slot and None as the url — confirm
    # downstream consumers expect this ordering.
    return urllib_request.addinfourl(Compatibility.BytesIO(self.rv), url,
                                     None, self.code)
def test_articles_skips_404_articles(self):
    """Feed entries whose extraction fails with HTTP 404 must be skipped."""
    from urllib import error
    self.create_patch('argos.core.membrane.feed.extract_entry_data',
                      side_effect=error.HTTPError(url=None, code=404,
                                                  msg=None, hdrs=None,
                                                  fp=None))
    articles = feed.articles(self.source)
    # assertEquals is a deprecated alias of assertEqual (removed in 3.12).
    self.assertEqual(len(articles), 0)
def urlopen(self, req):
    """Start an HTTP download through the hosting .NET application.

    Accepts either a Request object or a plain url string; non-success
    status codes are surfaced as HTTPError.
    """
    if isinstance(req, str):
        req = compat_urllib_request.Request(req)
    raw = self.cs_ytdl.PythonUrlOpen(req).GetAwaiter().GetResult()
    wrapped = FakeResponse(raw, self.cs_ytdl)
    if not raw.IsSuccessStatusCode:
        raise compat_urllib_error.HTTPError(wrapped.url, wrapped.status,
                                            wrapped.reason, wrapped.headers,
                                            wrapped)
    return wrapped
def test_post(self, urlopen_mock): """ Test the post method, POST a network resource """ # Setup recorded_data_file = tutil.resolve_full_path( '../data/cmr/search/one_cmr_result.json') urlopen_mock.return_value = valid_cmr_response(recorded_data_file) data = net.post("http://cmr.earthdata.nasa.gov/search", {}) self.assertEqual(276, (data['hits'])) data = net.post("http://cmr.earthdata.nasa.gov/search", {}, accept='application/xml') self.assertEqual(276, (data['hits'])) data = net.post("http://cmr.earthdata.nasa.gov/search", {}, headers={'platforms': 'SMAP'}) self.assertEqual(276, (data['hits'])) # test that a 200 with headers will be logged urlopen_mock.return_value = valid_cmr_response(recorded_data_file, 200, [('head-a', 'value-a')]) try: net.logger.setLevel('DEBUG') with self.assertLogs(net.logger, level='DEBUG') as test_log: data = net.post("http://cmr.earthdata.nasa.gov/search", {}) self.assertEqual({"head-a": "value-a"}, data['http-headers'], "headers do not match") self.assertEqual(test_log.output, [ 'DEBUG:cmr.util.network: Headers->CMR= None', "DEBUG:cmr.util.network: POST Data= b''", "DEBUG:cmr.util.network: CMR->Headers = {'head-a': 'value-a'}" ], "log does not match") except AssertionError: self.fail('no log entry') net.logger.setLevel('ERROR') # test that a 204 can be processed urlopen_mock.return_value = valid_cmr_response(recorded_data_file, 204, [('head-a', 'value-a')]) data = net.post("http://cmr.earthdata.nasa.gov/search", {}) self.assertEqual({'http-headers': {"head-a": "value-a"}}, data) # test an error urlopen_mock.side_effect = urlerr.HTTPError(Mock(status=500), "500", "Unprocessable Entity", None, None) data = net.post("http://cmr.earthdata.nasa.gov/search/fake", {}) expected = { 'code': '500', 'reason': 'Unprocessable Entity', 'errors': ['Unprocessable Entity'] } self.assertEqual(expected, data)
def _validate_response(req: request.Request, response: Dict) -> None: """ Validate if response from UMWaw API is correct. Args: req: full request to UMWaw API response: obtained response for req """ if 'result' not in response: raise error.HTTPError(url=req.full_url, code=204, fp=None, hdrs={}, msg='Empty data.') if 'error' in response or not isinstance(response['result'], list): msg = response['error'] if 'error' in response else response['result'] raise error.HTTPError(url=req.full_url, code=400, fp=None, hdrs={}, msg=msg)
def consultar(self, pregunta, id):
    """POST *pregunta* to the service for user *id* and decode the JSON reply.

    The first character of the message is lower-cased before sending.
    Timeouts and upstream 400s are reported as an HTTPError 400 with a
    maintenance message; any other URLError becomes an HTTPError 500.
    """
    mensaje = pregunta[0].lower() + pregunta[1:]
    payload = {'user_id': id, 'content': mensaje}
    try:
        peticion = request.Request(self._url,
                                   data=parse.urlencode(payload).encode())
        cuerpo = request.urlopen(peticion, timeout=10).read()
    except error.URLError as exc:
        mantenimiento = ("Janet se encuentra en mantenimiento en estos "
                         "momentos. Inténtelo de nuevo más tarde")
        if isinstance(exc.reason, timeout):
            raise error.HTTPError(self._url, 400, mantenimiento, None, None)
        if getattr(exc, 'code', None) == 400:
            raise error.HTTPError(self._url, 400, mantenimiento, None, None)
        raise error.HTTPError(self._url, 500, exc.reason, None, None)
    return json.loads(cuerpo.decode('utf-8'))
def test_articles_skips_404_articles(self):
    """Entries whose extraction fails with HTTP 404 must not reach the callback."""
    from urllib import error
    self.create_patch('argos.core.membrane.extractor.extract_entry_data',
                      side_effect=[
                          error.HTTPError(url=None, code=404, msg=None,
                                          hdrs=None, fp=None)
                      ])
    articles = []
    collector.get_articles(self.feed, lambda a: articles.append(a))
    # assertEquals is a deprecated alias of assertEqual (removed in 3.12).
    self.assertEqual(len(articles), 0)
def failed_download_api():
    """Build an Api whose urlopen always raises HTTP 404, for failure tests.

    NOTE(review): this replaces urllib_request.urlopen module-wide and never
    restores it — confirm callers run in an isolated test process.
    """
    not_found = urllib_error.HTTPError(code=404, msg='not found', hdrs=Mock(),
                                       fp=Mock(), url='')
    urllib_request.urlopen = Mock(side_effect=not_found)
    api = Api()
    api._log.error = Mock()
    return api
def get(query, count=0):
    """Fetch the quote payload at *query*, retrying on HTTP errors.

    Args:
        query: url of the quote service.
        count: current retry attempt (internal; callers use the default).

    Returns:
        The ['query']['results']['quote'] portion of the JSON reply, or ''
        when indexing the payload raises TypeError.

    Raises:
        urllib.error.HTTPError: once five retries have been exhausted.
    """
    try:
        data = json.loads(
            request.urlopen(query).read())['query']['results']['quote']
    except error.HTTPError:
        sleep(10)
        if count < 5:
            data = get(query, count=count + 1)
        else:
            # Re-raise the caught error. The original `raise
            # error.HTTPError()` is not constructible — HTTPError requires
            # (url, code, msg, hdrs, fp) — and would itself raise TypeError.
            raise
    except TypeError:
        data = ''
    return data
def downloadtitledump(data=None):
    """Fetch the AniDB title dump (at most once per day) and store it on disk.

    Args:
        data: persisted state dict with a 'LASTANIDBDUMP' timestamp; loaded
            via getdata() when not supplied.

    Returns:
        True on success.

    Raises:
        ValueError: when the last fetch was less than one day ago.
        urllib.error.HTTPError: when the fetch does not return HTTP 200.
    """
    if not data:
        data = getdata()
    today = datetime.datetime.today()
    previous = datetime.datetime.fromtimestamp(data['LASTANIDBDUMP'])
    if today < previous + datetime.timedelta(days=1):
        raise ValueError(
            "Per AniDB, cannot fetch title list more often than once a day")
    req = urequest.urlopen(ANIDB_TITLEDUMPURL)
    # The original compared the bound method `req.getcode` against (), which
    # is always True; check the actual status code instead.
    if req.getcode() != 200:
        # HTTPError requires (url, code, msg, hdrs, fp); the one-argument
        # form in the original would raise TypeError instead.
        raise uerror.HTTPError(ANIDB_TITLEDUMPURL, req.getcode(),
                               "Failed Fetch", req.headers, None)
    xml = req.read()
    data['LASTANIDBDUMP'] = today.timestamp()
    # urlopen().read() returns bytes; the original text-mode 'w' write
    # would raise TypeError.
    with open(ANIDB_TITLEFILELOCATION, 'wb') as f:
        f.write(xml)
    updatetitlelanguages(data=data)
    return True
def test_get_url_handles_exception_when_image_provider_returns_error(self):
    """get_url must return None (and never parse JSON) when the wiki image
    provider responds with a 404."""
    aircraft = Aircraft(
        [None, 'bar', None, self.seen_datetime, self.seen_datetime])
    failure = error.HTTPError('http://www.google.com', 404, 'foo', 'hdrs',
                              mock.MagicMock())
    image.request.urlopen = mock.MagicMock(side_effect=failure)

    url = self.image_service.get_url(aircraft)

    image.request.urlopen.assert_called_once_with(
        'https://commons.wikimedia.org/w/api.php?action=query&generator=categorymembers&' +
        'gcmtitle=Category:bar_(aircraft)&gcmtype=file&redirects=1&prop=imageinfo&iiprop=url&format=json'
    )
    self.assertIsNone(url)
    image.json.loads.assert_not_called()
def test_experimental_search(self, urlopen_mock): """ def search(query=None, filters=None, limit=None, options=None): """ # Setup recorded_file = tutil.resolve_full_path( '../data/cmr/search/ten_results_from_ghrc.json') # Basic urlopen_mock.return_value = valid_cmr_response(recorded_file, 200) generator = scom.experimental_search_by_page_generator( 'collections', {'provider': 'SEDAC'}) for item in generator: self.assertEqual("ORNL_DAAC", item['meta']['provider-id'], 'basic test') # page state uses scroll urlopen_mock.return_value = valid_cmr_response( recorded_file, 200, [('CMR-Scroll-Id', 'abcd')]) page_state = scom.create_page_state(limit=4000) page_state['CMR-Scroll-Id'] = 'abcd' generator = scom.experimental_search_by_page_generator( 'collections', {'provider': 'SEDAC'}, page_state=page_state) for item in generator: self.assertEqual("ORNL_DAAC", item['meta']['provider-id'], 'trigger scrloll id check') # error processing writes to log urlopen_mock.side_effect = urlerr.HTTPError(Mock(status=500), "500", "Server Error", None, None) generator = None try: with self.assertLogs(scom.logger, level='ERROR') as test_log: try: end_point = 'collections' query = {'provider': 'ORNL_DAAC'} generator = scom.experimental_search_by_page_generator( end_point, query) _ = next(generator) #consume generator to force action except StopIteration: pass #self.assertTrue(True, "generator should be empty") self.assertEqual(test_log.output, [ "ERROR:cmr.search.common:Error in generator: Server Error." ], "logs not matching") except AssertionError: self.fail('no log entry')
def __get_tag_list_by_url(self):
    """Download self.__site_url and collect its HTML tags into self.__tag_list.

    HTTP and connectivity failures are reported on stdout rather than
    propagated.
    """
    try:
        response = request.urlopen(self.__site_url)
        if response.status != 200:
            # HTTPError requires (url, code, msg, hdrs, fp); the original
            # passed a single formatted string, which raises TypeError.
            raise error.HTTPError(
                self.__site_url, response.status,
                "status code - {}.\n{}".format(response.status,
                                               response.info()),
                response.info(), None)
        content = response.read()
        html_counter = HTMLCounter()
        html_counter.feed(content.decode("utf-8"))
        self.__tag_list = html_counter.tag_list
    except error.HTTPError as e:
        # The original format strings had no '{}' placeholder, so the code
        # and reason were silently dropped.
        print("The server couldn\'t fulfill the request. "
              "HTTP response code: {}".format(str(e.code)))
    except error.URLError as e:
        print("Failed to reach a server. Reason: {}".format(str(e.reason)))
def test_load_url_error(mocked_urlopen):
    """
    GIVEN loading file from a URL fails
    WHEN get_schemas is called with the path to the file
    THEN SchemaNotFoundError is raised.
    """
    # Make every urlopen call fail with a 404.
    mocked_urlopen.side_effect = error.HTTPError(
        url="some url", code=404, msg="message", hdrs="headers", fp="fp")

    store = helpers.ref._RemoteSchemaStore()
    store.spec_context = "path1"

    with pytest.raises(exceptions.SchemaNotFoundError):
        store.get_schemas(context="http://host.com/doc.json")
def raise_for_status(response):
    """Raises stored :class:`HTTPError`, if one occurred.

    Taken from requests library. See:
    https://2.python-requests.org/en/master/_modules/requests/models/#Response.raise_for_status
    """
    reason = response.reason
    if isinstance(reason, bytes):
        # We attempt to decode utf-8 first because some servers choose to
        # localize their reason strings. If the string isn't utf-8, we fall
        # back to iso-8859-1 for all other encodings. (See PR #3538)
        try:
            reason = reason.decode("utf-8")
        except UnicodeDecodeError:
            reason = reason.decode("iso-8859-1")

    status = response.status
    if 400 <= status < 500:
        category = "Client Error"
    elif 500 <= status < 600:
        category = "Server Error"
    else:
        return

    raise error.HTTPError(
        url=response.geturl(),
        code=response.getcode(),
        msg=u"%s %s: %s for url: %s" % (status, category, reason,
                                        response.url),
        hdrs=response.info(),
        fp=None,
    )
def urlopen(*args, **kwargs):
    """Fake urlopen for payment-gateway tests.

    Raises URLError/HTTPError when the enclosing scope's flags ask for it;
    otherwise returns an object whose read() yields the canned JSON reply.
    """
    if raise_url_error:
        raise error.URLError('FAKE ERROR')
    if raise_http_error:
        raise error.HTTPError(url='', code=raise_http_error, msg='FAKE ERROR',
                              hdrs={}, fp=None)

    class Response:
        @staticmethod
        def read():
            payload = {
                'transId': returned_token,
                'cardNumber': '1111-2222-3333-4444',
                'status': returned_status,
                'amount': returned_amount,
                'errorMessage': error_message,
                'errorCode': error_code,
            }
            return json.dumps(payload).encode()

    return Response()
def test_get_historical_price__invalid(self):
    """A 404 from the source must make get_historical_price return None."""
    # HTTPError expects an int status code followed by a message string;
    # the original passed ('url', 'code', '404', ...), i.e. a string in the
    # code slot and the status in the message slot.
    self.url_object.read.side_effect = error.HTTPError(
        'url', 404, 'Not Found', {}, None)
    srcprice = self.fetcher.get_historical_price('CURRENCY:INVALID',
                                                 datetime.date(2014, 5, 7))
    self.assertIsNone(srcprice)
def main():
    """Command-line entry point: fetch a url through the configured proxy.

    Parses arguments, validates the proxy configuration, then either
    downloads to a file (--out) or prints the response body.

    Raises:
        ValueError: when no usable proxy server/port is available.
        urllib.error.HTTPError: when a non-200 response is received.
    """
    parser = argparse.ArgumentParser('proxyget', description=desc)
    parser.add_argument('url', help="The url to retrieve")
    parser.add_argument('-o', '--out', default=None,
                        help="If specified, write the output to this file")
    parser.add_argument('--server', default=default_server,
                        help="The proxy server (either an IP or URL)")
    parser.add_argument('--port', type=int, default=default_port,
                        help="The port (an integer between 0 and 65535)")
    parser.add_argument('--domain', default=default_domain,
                        help="The username domain (can be empty string)")
    parser.add_argument('--user', default=default_user,
                        help="The username for the proxy "
                             "(will assume OS username if not passed)")
    parser.add_argument('-q', '--quiet', action='store_true',
                        help="Select to quiet output")
    parser.add_argument('-b', '--binary', action="store_true",
                        help="Set if downloading a binary file")

    args = parser.parse_args()
    out = Path(args.out) if args.out is not None else None

    proxy_info = proxyget.ProxyInfo(args.server, args.port, args.domain,
                                    args.user)
    try:
        proxy_info.assert_correct()
    except TypeError:  # default not defined
        raise ValueError("--server and --port must be provided "
                         "if no default proxy info specified") from None
    except ValueError:  # bad port
        raise

    if args.out:
        # The original re-checked `out is None` here and raised a ValueError
        # referring to a nonexistent '--exe' flag; that branch was
        # unreachable (out is only None when args.out is None) and has been
        # removed.
        if not args.quiet:
            print(f'getting file from {args.url}. Please wait...')
        proxyget.get_file(args.url, out, args.binary, proxy_info=proxy_info,
                          quiet=args.quiet)
    else:
        if not args.quiet:
            print(f'Getting site {args.url}...')
        data = proxyget.get(args.url, proxy_info=proxy_info)
        if data.status_code != 200:
            raise error.HTTPError(args.url, data.status_code, data.text,
                                  data.headers, None)
        print(data.text)
    if not args.quiet:
        print('done')
def raiser(req):
    """Stub handler: always fail with HTTP 400 whose body is b'ANY MESSAGE'."""
    body = io.BytesIO(b'ANY MESSAGE')
    raise error.HTTPError('url', 400, 'msg', {}, body)
def send_request(request):
    """Stub transport: always fail with HTTP 400 and a b'msg' body."""
    body = io.BytesIO(b'msg')
    raise error.HTTPError(self.url, 400, 'msg', {}, body)
def test_get(self, urlopen_mock):
    """ Test the get method, get a network resource """
    # Setup
    recorded_data_file = tutil.resolve_full_path(
        '../data/cmr/search/one_cmr_result.json')
    urlopen_mock.return_value = valid_cmr_response(recorded_data_file)
    data = net.get("http://cmr.earthdata.nasa.gov/search")
    self.assertEqual(276, (data['hits']), "only required parameters")
    data = net.get("http://cmr.earthdata.nasa.gov/search",
                   accept='application/xml')
    self.assertEqual(276, (data['hits']), "with an accept")
    data = net.get("http://cmr.earthdata.nasa.gov/search",
                   headers={'platforms': 'SMAP'})
    self.assertEqual(276, (data['hits']), "with a header")

    # test that a 200 can be processed with headers by writing to log
    urlopen_mock.return_value = valid_cmr_response(recorded_data_file, 200,
                                                   [('key', 'value')])
    try:
        net.logger.setLevel('DEBUG')
        with self.assertLogs(net.logger, level='DEBUG') as test_log:
            data = net.get("http://cmr.earthdata.nasa.gov/search")
            self.assertEqual(test_log.output, [
                'DEBUG:cmr.util.network: Headers->CMR= None',
                "DEBUG:cmr.util.network: CMR->Headers = {'key': 'value'}"
            ])
    except AssertionError:
        self.fail("no log entry")
    net.logger.setLevel('ERROR')

    # test that a 204 can be processed
    urlopen_mock.return_value = valid_cmr_response(recorded_data_file, 204,
                                                   [('key', 'value')])
    data = net.get("http://cmr.earthdata.nasa.gov/search")
    self.assertEqual({'http-headers': {
        'key': 'value'
    }}, data, "a 204 response")

    # standard tea pot test : error handling
    urlopen_mock.return_value = tutil.MockResponse("I'm a tea pot", 416,
                                                   [('key', 'value')])
    data = net.get("http://cmr.earthdata.nasa.gov/search")
    self.assertEqual("I'm a tea pot", data, "a 416 response")

    # exception handling
    # NOTE(review): HTTPError's first two args should be (url, int code);
    # Mock(status=500) / "500" work here presumably because net.get only
    # reads .code and .reason — confirm.
    urlopen_mock.side_effect = urlerr.HTTPError(Mock(status=500), "500",
                                                "Server Error", None, None)
    data = net.get("http://cmr.earthdata.nasa.gov/search")
    expected = {
        'code': '500',
        'reason': 'Server Error',
        'errors': ['Server Error']
    }
    self.assertEqual(expected, data, "exception was not caught")

    # test an error
    urlopen_mock.side_effect = urlerr.HTTPError(Mock(status=500), "500",
                                                "Unprocessable Entity", None,
                                                None)
    data = net.get("http://cmr.earthdata.nasa.gov/search/fake")
    expected = {
        'code': '500',
        'reason': 'Unprocessable Entity',
        'errors': ['Unprocessable Entity']
    }
    self.assertEqual(expected, data, "an exeption")
    urlopen_mock.side_effect = None

    # test list response such as the provider list from ingest
    recorded_data_file = tutil.resolve_full_path(
        '../data/cmr/ingest/providers.json')
    urlopen_mock.return_value = valid_cmr_response(recorded_data_file)
    data = net.get(
        "http://cmr.earthdata.nasa.gov/ingest/providers?pretty=true")
    self.assertEqual(110, len(data['items']))
def test_search_by_page(self, clr_scroll_mock, urlopen_mock):
    """ Test the inner function which performs the first half of a search """
    recorded_file = tutil.resolve_full_path(
        '../data/cmr/search/ten_results_from_ghrc.json')
    urlopen_mock.return_value = valid_cmr_response(recorded_file, 200)
    query = {'keyword': 'water'}
    response = scom.search_by_page('collections', query)
    self.assertEqual(10, len(response), 'assumed page_state')

    # page state uses scroll
    page_state = scom.create_page_state(limit=4000)
    urlopen_mock.return_value = valid_cmr_response(
        recorded_file, 200, [('CMR-Scroll-Id', 'si-01')])
    page_state['CMR-Scroll-Id'] = 'abcd'
    response = scom.search_by_page('collections', query,
                                   page_state=page_state)
    self.assertEqual(20, len(response), 'assumed page_state')

    # error processing 1
    urlopen_mock.return_value = tutil.MockResponse("I'm a tea pot", 418)
    response = scom.search_by_page('collections', query,
                                   config={'debug': True})
    expected = {
        'errors': ['unknown response: I\'m a tea pot'],
        'code': 0,
        'reason': 'unknown response: I\'m a tea pot'
    }
    self.assertEqual(expected, response, "exeption")

    # error processing 2
    # NOTE(review): side_effect takes precedence over return_value here, so
    # the 500 JSON body is never actually served — confirm this is intended.
    urlopen_mock.return_value = valid_cmr_response('{"errors":["Error"]}',
                                                   500)
    urlopen_mock.side_effect = urlerr.HTTPError(Mock(status=500), "500",
                                                "Server Error", None, None)
    response = scom.search_by_page('collections', query)
    expected = {
        'code': '500',
        'reason': 'Server Error',
        'errors': ['Server Error']
    }
    self.assertEqual(expected, response, "exeption")

    # bad clear response is logged
    recorded_file = tutil.resolve_full_path(
        '../data/cmr/search/ten_results_from_ghrc.json')
    clr_scroll_mock.return_value = {'errors': ['bad scroll id']}
    urlopen_mock.return_value = valid_cmr_response(recorded_file, 200)
    urlopen_mock.side_effect = None
    response = scom.search_by_page('collections', query,
                                   page_state=page_state)
    self.assertEqual(10, len(response), "bad scroll id")

    # takes to long
    recorded_file = tutil.resolve_full_path(
        '../data/cmr/search/ten_results_from_ghrc.json')
    page_state['took'] = 300001
    page_state['page_size'] = 1
    urlopen_mock.return_value = valid_cmr_response(recorded_file, 200)
    response = scom.search_by_page('collections', query,
                                   page_state=page_state)
    self.assertEqual(10, len(response), "bad scroll id")
def http_error_default(self, req, fp, code, msg, headers):
    """Turn any HTTP error response into a returned HTTPError object instead
    of a raised exception, so callers can inspect it like a response."""
    result = err.HTTPError(req.get_full_url(), code, msg, headers, fp)
    try:
        result.status = code
    except AttributeError:
        # Since Python 3.9, addinfourl.status is a read-only property that
        # already returns .code, so the assignment fails there (and is not
        # needed); on older versions the plain attribute is still set above.
        pass
    return result
def mock(url: str) -> Dict[str, Any]:
    """Return the canned payload for *url*; unknown urls raise a 404 HTTPError."""
    if url not in data:
        raise error.HTTPError(url, 404, f"Not found: {url}", {}, None)
    return data[url]
def raiser(req):
    """Stub handler: always fail with HTTP 400 carrying err_msg as the body."""
    payload = io.BytesIO(err_msg)
    raise error.HTTPError('url', 400, 'msg', {}, payload)