def exception_callback(request, uri, headers):
    raise HTTPError("404 Page Not found.")
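# Hedged usage sketch (not from the source): a callback with this
# (request, uri, headers) signature matches httpretty's body-callback
# interface, so registering it should make any matching request raise
# HTTPError instead of returning a canned body. The URL is a placeholder.
import httpretty
import requests
from requests import HTTPError

httpretty.enable()
httpretty.register_uri(httpretty.GET, "http://example.com/missing",
                       body=exception_callback)
try:
    requests.get("http://example.com/missing")
except HTTPError as err:
    print(err)  # "404 Page Not found."
finally:
    httpretty.disable()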
def request(self, method, path, headers=None, params=None, data=None, **kwargs):
    """
    A wrapper around ``requests.request`` that handles boilerplate code specific
    to TruStar's API.

    :param str method: The method of the request (``GET``, ``PUT``, ``POST``, or ``DELETE``)
    :param str path: The path of the request, i.e. the piece of the URL after the base URL
    :param dict headers: A dictionary of headers that will be merged with the base headers
        for the SDK
    :param kwargs: Any extra keyword arguments.  These will be forwarded to the call to
        ``requests.request``.
    :return: The response object.
    """
    retry = self.retry
    attempted = False
    while not attempted or retry:
        # get headers and merge with headers from method parameter if it exists
        base_headers = self._get_headers(is_json=method in ["POST", "PUT"])
        if headers is not None:
            base_headers.update(headers)

        url = "{}/{}".format(self.base, path)

        # make request
        response = requests.request(method=method,
                                    url=url,
                                    headers=base_headers,
                                    verify=self.verify,
                                    params=params,
                                    data=data,
                                    proxies=self.proxies,
                                    **kwargs)
        attempted = True

        # log request
        self.logger.debug("%s %s. Trace-Id: %s. Params: %s",
                          method, url, response.headers.get('Trace-Id'), params)

        # refresh token if expired
        if self._is_expired_token_response(response):
            self._refresh_token()

        # if "too many requests" status code received, wait until next request
        # will be allowed and retry
        elif retry and response.status_code == 429:
            wait_time = ceil(response.json().get('waitTime') / 1000)
            self.logger.debug("Waiting %d seconds until next request allowed." % wait_time)

            # if wait time exceeds max wait time, allow the exception to be thrown
            if wait_time <= self.max_wait_time:
                time.sleep(wait_time)
            else:
                retry = False

        # request cycle is complete
        else:
            retry = False

    # raise exception if status code indicates an error
    if 400 <= response.status_code < 600:

        # get response json body, if one exists
        resp_json = None
        try:
            resp_json = response.json()
        except Exception:
            pass

        # get message from json body, if one exists
        if resp_json is not None and 'message' in resp_json:
            reason = resp_json['message']
        else:
            reason = "unknown cause"

        # construct error message
        message = "{} {} Error: {}".format(response.status_code,
                                           "Client" if response.status_code < 500 else "Server",
                                           reason)

        # raise HTTPError
        raise HTTPError(message, response=response)

    return response
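# Self-contained sketch of the 429 back-off computation above: the API reports
# `waitTime` in milliseconds, and the wrapper sleeps for the ceiling of that
# value in seconds, but only when it does not exceed `max_wait_time`.
from math import ceil

wait_time_ms = 2500                    # hypothetical `waitTime` from the JSON body
wait_time = ceil(wait_time_ms / 1000)  # -> 3 seconds
assert wait_time == 3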
def test_version_exists_raises_http_error(self, mock):
    mock.side_effect = HTTPError(Mock(status=404), 'not found')
    self.assertFalse(self.client._version_exists('mock version'))
def http_error():
    return HTTPError(REQUEST_EXCEPTION_MESSAGE)
def test_service_request_500(self):
    requests = Mock()
    requests.get.side_effect = HTTPError('Server Error: 500')
    with self.assertRaises(HTTPError):
        claim_service_request(requests, self.service_request, '0f00')
def _download_one(self, table, quality_bitmask, download_dir, cutout_size, **kwargs):
    """Private method used by `download()` and `download_all()` to download
    exactly one file from the MAST archive.

    Always returns a `TargetPixelFile` or `LightCurve` object.
    """
    # Make sure astroquery uses the same level of verbosity
    logging.getLogger('astropy').setLevel(log.getEffectiveLevel())

    if download_dir is None:
        download_dir = self._default_download_dir()

    # if the SearchResult row is a TESScut entry, then download cutout
    if 'FFI Cutout' in table[0]['description']:
        try:
            log.debug("Started downloading TESSCut for '{}' sector {}."
                      "".format(table[0]['target_name'], table[0]['sequence_number']))
            path = self._fetch_tesscut_path(table[0]['target_name'],
                                            table[0]['sequence_number'],
                                            download_dir,
                                            cutout_size)
        except Exception as exc:
            msg = str(exc)
            if "504" in msg:
                # TESSCut will occasionally return a "504 Gateway Timeout
                # error" when it is overloaded.
                raise HTTPError('The TESS FFI cutout service at MAST appears '
                                'to be temporarily unavailable. It returned '
                                'the following error: {}'.format(exc))
            else:
                raise SearchError('Unable to download FFI cutout. Desired target '
                                  'coordinates may be too near the edge of the FFI. '
                                  'Error: {}'.format(exc))

        return read(path,
                    quality_bitmask=quality_bitmask,
                    targetid=table[0]['targetid'])

    else:
        if cutout_size is not None:
            warnings.warn('`cutout_size` can only be specified for TESS '
                          'Full Frame Image cutouts.', LightkurveWarning)

        # Whenever `astroquery.mast.Observations.download_products` is called,
        # a HTTP request will be sent to determine the length of the file
        # prior to checking if the file already exists in the local cache.
        # For performance, we skip this HTTP request and immediately try to
        # find the file in the cache.  The path we check here is consistent
        # with the one hard-coded inside `astroquery.mast.Observations._download_files()`
        # in Astroquery v0.4.1.  It would be good to submit a PR to astroquery
        # so we can avoid having to use this hard-coded hack.
        path = os.path.join(download_dir.rstrip('/'),
                            "mastDownload",
                            table['obs_collection'][0],
                            table['obs_id'][0],
                            table['productFilename'][0])
        if os.path.exists(path):
            log.debug("File found in local cache.")
        else:
            from astroquery.mast import Observations
            log.debug("Started downloading {}.".format(table[:1]['dataURL'][0]))
            path = Observations.download_products(table[:1], mrp_only=False,
                                                  download_dir=download_dir)['Local Path'][0]
            log.debug("Finished downloading.")
        return read(path, quality_bitmask=quality_bitmask, **kwargs)
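# Hedged usage sketch: `_download_one` is private; in lightkurve the public
# route to it is a SearchResult's download()/download_all(). The target name
# and quarter are only examples, and this performs a real network call.
from lightkurve import search_targetpixelfile

tpf = search_targetpixelfile("Kepler-10", quarter=1).download()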
def test_raises_exception_for_404_response(self, mocked_regular_season_player_box_scores):
    mocked_regular_season_player_box_scores.side_effect = HTTPError(
        response=MagicMock(status_code=codes.not_found)
    )
    self.assertRaises(InvalidPlayerAndSeason, regular_season_player_box_scores,
                      'Mock Player', 2000)
def test_upsert_website_pipelines(
    settings,
    mocker,
    mock_auth,
    pipeline_settings,
    version,
    home_page,
    pipeline_exists,
    hard_purge,
    with_api,
):  # pylint:disable=too-many-locals,too-many-arguments,too-many-branches,unused-argument
    """The correct concourse API args should be made for a website"""
    settings.CONCOURSE_HARD_PURGE = hard_purge
    hugo_projects_path = "https://github.com/org/repo"
    starter = WebsiteStarterFactory.create(
        source=STARTER_SOURCE_GITHUB, path=f"{hugo_projects_path}/site"
    )
    if home_page:
        name = settings.ROOT_WEBSITE_NAME
        starter.config["root-url-path"] = ""
    else:
        name = "standard-course"
        starter.config["root-url-path"] = "courses"
    website = WebsiteFactory.create(starter=starter, name=name)
    instance_vars = f"%7B%22site%22%3A%20%22{website.name}%22%7D"
    url_path = f"/api/v1/teams/{settings.CONCOURSE_TEAM}/pipelines/{version}/config?vars={instance_vars}"

    if not pipeline_exists:
        mock_get = mocker.patch(
            "content_sync.pipelines.concourse.ConcourseApi.get_with_headers",
            side_effect=HTTPError(),
        )
    else:
        mock_get = mocker.patch(
            "content_sync.pipelines.concourse.ConcourseApi.get_with_headers",
            return_value=({}, {"X-Concourse-Config-Version": "3"}),
        )
    mock_put_headers = mocker.patch(
        "content_sync.pipelines.concourse.ConcourseApi.put_with_headers"
    )
    existing_api = ConcourseApi("a", "b", "c", "d") if with_api else None
    pipeline = SitePipeline(website, api=existing_api)
    assert (pipeline.api == existing_api) is with_api
    pipeline.upsert_pipeline()

    mock_get.assert_any_call(url_path)
    mock_put_headers.assert_any_call(
        url_path,
        data=mocker.ANY,
        headers=({"X-Concourse-Config-Version": "3"} if pipeline_exists else None),
    )
    if version == VERSION_DRAFT:
        _, kwargs = mock_put_headers.call_args_list[0]
        bucket = settings.AWS_PREVIEW_BUCKET_NAME
        api_url = settings.OCW_STUDIO_DRAFT_URL
    else:
        _, kwargs = mock_put_headers.call_args_list[1]
        bucket = settings.AWS_PUBLISH_BUCKET_NAME
        api_url = settings.OCW_STUDIO_LIVE_URL
    config_str = json.dumps(kwargs)
    assert f"{hugo_projects_path}.git" in config_str
    assert settings.OCW_GTM_ACCOUNT_ID in config_str
    assert settings.OCW_IMPORT_STARTER_SLUG in config_str
    assert api_url in config_str
    if home_page:
        assert (
            f"s3 sync s3://{settings.AWS_STORAGE_BUCKET_NAME}/{website.name} s3://{bucket}/{website.name}"
            in config_str
        )
        assert f"aws s3 sync course-markdown/public s3://{bucket}/" in config_str
    else:
        assert (
            f"s3 sync s3://{settings.AWS_STORAGE_BUCKET_NAME}/courses/{website.name} s3://{bucket}/courses/{website.name}"
            in config_str
        )
        assert (
            f"aws s3 sync course-markdown/public s3://{bucket}/courses/{website.name}"
            in config_str
        )
    assert f"purge/{website.name}" in config_str
    assert f" --metadata site-id={website.name}" in config_str
    has_soft_purge_header = "Fastly-Soft-Purge" in config_str
    assert has_soft_purge_header is not hard_purge
def create_pipeline_run(organization_uuid, pipeline_uuid, request_json):
    """Creates OrganizationPipelineRuns for a pipeline."""
    org_pipeline = find_organization_pipeline(organization_uuid, pipeline_uuid)
    input_file_uuids = request_json.get("inputs", [])

    if not org_pipeline:
        raise ValueError({"message": "organizational_pipeline_uuid not found"})

    org_pipeline_input_files = search_organization_pipeline_input_files(
        org_pipeline.id, input_file_uuids
    )

    new_pipeline_run = OrganizationPipelineRun(
        organization_pipeline_id=org_pipeline.id,
        status_update_token=uuid.uuid4().hex,
        status_update_token_expires_at=datetime.now() + timedelta(days=7),
        share_token=uuid.uuid4().hex,
        share_password_hash=None,
        share_password_salt=None,
    )

    db.session.add(new_pipeline_run)
    db.session.flush()

    new_pipeline = {"inputs": []}

    for opf in org_pipeline_input_files:
        sname = quote(opf.name)
        url = create_url(f"{pipeline_uuid}/{opf.uuid}-{sname}", sname)
        new_pipeline["inputs"].append({"url": url, "name": opf.name})

    response = requests.post(
        f"{current_app.config[WORKFLOW_HOSTNAME]}/v1/pipelines/{org_pipeline.pipeline_uuid}/runs",
        headers={
            "Content-Type": "application/json",
            ROLES_KEY: current_app.config[WORKFLOW_API_TOKEN],
        },
        json=new_pipeline,
    )

    try:
        created_pipeline = response.json()
        response.raise_for_status()

        new_pipeline_run.uuid = (
            new_pipeline_run.pipeline_run_uuid
        ) = created_pipeline.get("uuid")

        db.session.add(new_pipeline_run)

        for opf in org_pipeline_input_files:
            opf.organization_pipeline_run_id = new_pipeline_run.id
            db.session.add(opf)

        db.session.commit()

        created_pipeline.update({
            "uuid": new_pipeline_run.uuid,
        })

        return created_pipeline
    except ValueError as value_error:
        raise HTTPError("Non JSON payload returned") from value_error
    except HTTPError as http_error:
        raise ValueError(created_pipeline) from http_error
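# Hedged caller sketch (UUIDs are placeholders): note the deliberate inversion
# in the except clauses above -- a non-JSON workflow response is re-raised as
# HTTPError, while an HTTP error status is re-raised as ValueError carrying
# the decoded payload, so callers branch accordingly.
from requests import HTTPError

try:
    run = create_pipeline_run("org-uuid", "pipeline-uuid", {"inputs": []})
except ValueError as err:
    ...  # workflow service rejected the run; err wraps the JSON error payload
except HTTPError:
    ...  # workflow service returned a non-JSON body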
def test_other_http_error_is_raised(self, mocked_players_season_totals):
    mocked_players_season_totals.side_effect = HTTPError(
        response=MagicMock(status_code=codes.internal_server_error)
    )
    self.assertRaises(HTTPError, players_season_totals, season_end_year=2018)
def get_entry(self, entry_id, suppress_errors=False):
    self._query_num += 1

    # if configured, work normally, deferring failure until the requested query
    if self._query_num != self._fail_on_query_num:
        logger.debug('On query %d ...waiting to fail on #%d..'
                     % (self._query_num, self._fail_on_query_num))
        return super(IceTestStub, self).get_entry(entry_id,
                                                  suppress_errors=suppress_errors)

    # NOTE: all tests below assume the first-run case where
    # ignore_ice_related_errors=False. All the expected results still hold if
    # it's True, except the response should always be 200 (success)

    ###########################################################################
    # Test condition 1:
    ###########################################################################
    # Uncomment this block to test connection errors.
    #
    # Expected results to verify (manually for now):
    # A) admin email sent (probably via an added log message...not working in DEV ATM)
    # B) generic user-facing error message about ICE access problems
    # C) 500 "internal server error" response (use Chrome's "network" developer tool)
    ###########################################################################
    # raise requests.exceptions.ConnectionError()

    ###########################################################################
    # Test condition 2:
    ###########################################################################
    # Uncomment this block to test bad user data entry for part IDs
    #
    # Expected results to verify (manually for now):
    # A) *NO* admin email sent (probably via an added log message...not working in DEV ATM)
    # B) User error message lists parts that couldn't be found
    # C) 404 "not found" response (use Chrome's "network" developer tool)
    ###########################################################################
    # return None

    ###########################################################################
    # Test conditions 3-4:
    ###########################################################################
    # Uncomment a single status code and the bottom code block in this
    # method to test various supported error responses from ICE.
    #
    # Expected results to verify (manually for now):
    # A) Admin email sent (probably via an added log message...not working in DEV ATM)
    # B) User error message mentions generic ICE-related problems
    # C) 500 "internal server error" response (use Chrome's "network" developer tool)

    # Condition 3
    # message = 'Bad client request'
    # status = BAD_REQUEST

    # Condition 4
    # message = 'Internal Server Error'
    # status = INTERNAL_SERVER_ERROR

    ###########################################################################
    # Test condition 5:
    ###########################################################################
    # Uncomment a status code and the bottom code block in this
    # method to test various supported error responses from ICE.
    #
    # Expected results to verify (manually for now):
    # A) *NO* Admin email sent (probably via an added log message...not working in DEV ATM)
    # B) User error message specifically mentions ICE permission problems
    # C) 403 "forbidden" response (use Chrome's "network" developer tool)
    message = 'Forbidden'
    status = codes.forbidden

    ###########################################################################
    # Supporting error-generation code for test conditions 3-5 above
    ###########################################################################
    from requests import HTTPError
    response = HTTPResponse(status=status)
    error = HTTPError(message, response=response)
    raise error
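# Self-contained sketch of the error-generation pattern used for conditions
# 3-5 above: attach a response carrying the desired status code to the
# HTTPError before raising, so downstream code can branch on
# err.response.status_code.
from unittest.mock import Mock
from requests import HTTPError

resp = Mock(status_code=403)
try:
    raise HTTPError('Forbidden', response=resp)
except HTTPError as err:
    assert err.response.status_code == 403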
def test_validate_signature(self):
    """Check validate signature"""
    data = {
        "data": {
            "external_ids": ['SignedChain'],
            "dblock": {"height": 10000},
            "content": "123",
        }
    }
    result = ValidateSignatureUtil.validate_signature(data, True, self.request_handler)
    self.assertEqual("not_signed/invalid_chain_format", result)

    data = {
        "data": {
            "external_ids": [
                'SignedChain',
                '0x01',
                '171e5851451ce6f2d9730c1537da4375feb442870d835c54a1bca8ffa7e2bda7',
                'idpub',
                '779229d23cdb7380869e63e5156a5497170bceec139b37e7af2a4d1aae14d053d19f7626e08d4bbb003d4b05d941f43402f1288af2ff0391a2dee4abf0919b07',
                '2019-01-18T14:17:50Z',
            ],
            "dblock": {"height": 10000},
            "content": "123",
        }
    }
    result = ValidateSignatureUtil.validate_signature(data, True, self.request_handler)
    self.assertEqual("not_signed/invalid_chain_format", result)

    data = {
        "data": {
            "external_ids": [
                'SignedChain',
                '0x01',
                '171e5851451ce6f2d9730c1537da4375feb442870d835c54a1bca8ffa7e2bda7',
                'idpub3NegGMKn2CDcx3A9JkpoMm2jE9KxchxqHTmXPvJnmUJGizfrb7',
                '779229d23cdb7380869e63e5156a5497170bceec139b37e7af2a4d1aae14d053d19f7626e08d4bbb003d4b05d941ty3402f1288af2ff0391a2dee4abf0919b07',
                '2019-01-18T14:17:50Z',
            ],
            "dblock": {"height": 10000},
            "content": "123",
        }
    }
    result = ValidateSignatureUtil.validate_signature(data, True, self.request_handler)
    self.assertEqual("not_signed/invalid_chain_format", result)

    data = {
        "data": {
            "external_ids": [
                'SignedChain',
                '0x01',
                '171e5851451ce6f2d9730c1537da4375feb442870d835c54a1bca8ffa7e2bda7',
                'idpub3NegGMKn2CDcx3A9JkpoMm2jE9KxchxqHTmXPvJnmUJGizfrb7',
                '779229d23cdb7380869e63e5156a5497170bceec139b37e7af2a4d1aae14d053d19f7626e08d4bbb003d4b05d941f43402f1288af2ff0391a2dee4abf0919b07',
                '2019-01-18T14:17:50Z',
            ],
            "dblock": {"height": 10000},
            "content": "123",
        }
    }
    with patch(
        "factom_sdk.request_handler.request_handler.requests.request"
    ) as mock_get:
        mock_error = HTTPError()
        mock_error.response = Mock()
        mock_error.response.status_code = 404
        mock_get.side_effect = mock_error
        response = ValidateSignatureUtil.validate_signature(data, True, self.request_handler)
        self.assertEqual("key_not_found", response)

    with self.assertRaises(HTTPError) as cm:
        with patch(
            "factom_sdk.request_handler.request_handler.requests.request"
        ) as mock_get:
            mock_error = HTTPError()
            mock_error.response = Mock()
            mock_error.response.status_code = 500
            mock_get.side_effect = mock_error
            ValidateSignatureUtil.validate_signature(data, True, self.request_handler)
    self.assertTrue("" in str(cm.exception))

    with patch(
        "factom_sdk.request_handler.request_handler.requests.request"
    ) as mock_get:
        json = {
            "data": {
                "key": "123",
                "retired_height": 1001,
                "activated_height": 1001,
            }
        }
        mock_get.return_value.ok = True
        mock_get.return_value.json.return_value = json
        response = ValidateSignatureUtil.validate_signature(data, True, self.request_handler)
        self.assertEqual("retired_key", response)

    data = {
        "data": {
            "external_ids": [
                'SignedEntry',
                '0x01',
                '171e5851451ce6f2d9730c1537da4375feb442870d835c54a1bca8ffa7e2bda7',
                'idpub3NegGMKn2CDcx3A9JkpoMm2jE9KxchxqHTmXPvJnmUJGizfrb7',
                '779229d23cdb7380869e63e5156a5497170bceec139b37e7af2a4d1aae14d053d19f7626e08d4bbb003d4b05d941f43402f1288af2ff0391a2dee4abf0919b07',
                '2019-01-18T14:17:50Z',
            ],
            "dblock": {"height": 1000},
            "content": "123",
        }
    }
    with patch(
        "factom_sdk.request_handler.request_handler.requests.request"
    ) as mock_get:
        json = {
            "data": {
                "key": "123",
                "retired_height": 900,
                "activated_height": 1001,
            }
        }
        mock_get.return_value.ok = True
        mock_get.return_value.json.return_value = json
        response = ValidateSignatureUtil.validate_signature(data, False, self.request_handler)
        self.assertEqual("retired_key", response)

    with patch(
        "factom_sdk.request_handler.request_handler.requests.request"
    ) as mock_get:
        json = {
            "data": {
                "key": "idpub3NegGMKn2CDcx3A9JkpoMm2jE9KxchxqHTmXPvJnmUJGizfrb7",
                "retired_height": 1001,
                "activated_height": 900,
            }
        }
        mock_get.return_value.ok = True
        mock_get.return_value.json.return_value = json
        response = ValidateSignatureUtil.validate_signature(data, False, self.request_handler)
        self.assertEqual("invalid_signature", response)
    }
    r = client.post('/api/v1.0/waivers/', data=json.dumps(data),
                    content_type='application/json')
    res_data = json.loads(r.get_data(as_text=True))
    assert r.status_code == 201
    assert res_data['username'] == 'foo'
    assert res_data['subject'] == {'original_spec_nvr': 'somedata'}
    assert res_data['testcase'] == 'sometest'
    assert res_data['product_version'] == 'fool-1'
    assert res_data['waived'] is True
    assert res_data['comment'] == 'it broke'


@patch('waiverdb.api_v1.get_resultsdb_result',
       side_effect=HTTPError(response=Mock(status=404)))
@patch('waiverdb.auth.get_user', return_value=('foo', {}))
def test_create_waiver_with_unknown_result_id(mocked_get_user, mocked_resultsdb,
                                              client, session):
    data = {
        'result_id': 123,
        'product_version': 'fool-1',
        'waived': True,
        'comment': 'it broke',
    }
    mocked_resultsdb.return_value.status_code = 404
    r = client.post('/api/v1.0/waivers/', data=json.dumps(data),
                    content_type='application/json')
    res_data = json.loads(r.get_data(as_text=True))
def on_error(self, exc: Exception):
    if isinstance(exc, HTTPError):
        res: Response = exc.response
        raise HTTPError(res.json()) from exc
    super().on_error(exc)
def raise_for_status(self):
    raise HTTPError('Server Error: 500 for url: https://google.com')
def mock_request_error(*args, **kwargs):
    raise HTTPError()
def test_http_error(self, bad_mock):
    """ Test a Bad Create with HTTP error """
    bad_mock.side_effect = HTTPError()
    inventory = Inventory("tools", "widget1", False, "new")
    inventory.create()
    self.assertIsNone(inventory.id)
def raise_for_status(self):
    if self.status_code == 200:
        return
    error = HTTPError()
    error.response = self
    raise error
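# Hedged usage sketch (the enclosing class name `MockResponse` and its
# constructor are assumptions; the snippet only shows the method): the stub
# mirrors requests' behaviour of attaching the offending response to the
# raised HTTPError, so test code can assert on err.response.
resp = MockResponse()
resp.status_code = 404
try:
    resp.raise_for_status()
except HTTPError as err:
    assert err.response is resp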
def get_from_cache(url: str, cache_dir: Path = None) -> Path:
    r"""
    Try to find the resource identified by ``url`` in ``cache_dir``; if it is
    not found, download it from ``url`` and place the result under
    ``cache_dir``, with the cache name inferred from the url. The downloaded
    file is decompressed and all extracted files are placed in the
    ``cache_dir`` folder.

    If the resource downloaded from ``url`` contains multiple files after
    extraction, the directory path is returned; if there is only a single
    resource file, the path to that file is returned.

    :param url: url of the resource
    :param cache_dir: cache directory
    :return: path
    """
    cache_dir.mkdir(parents=True, exist_ok=True)

    filename = re.sub(r".+/", "", url)
    dir_name, suffix = split_filename_suffix(filename)

    # look for content matching the name, ignoring the suffix
    match_dir_name = match_file(dir_name, cache_dir)
    if match_dir_name:
        dir_name = match_dir_name
    cache_path = cache_dir / dir_name

    # get cache path to put the file
    if cache_path.exists():
        return get_filepath(cache_path)

    # make HEAD request to check ETag
    # TODO the ETag can be used to tell whether the resource has been updated;
    #   this should be added later
    # response = requests.head(url, headers={"User-Agent": "fastNLP"})
    # if response.status_code != 200:
    #     raise IOError(
    #         f"HEAD request failed for url {url} with status code {response.status_code}."
    #     )

    # add ETag to filename if it exists
    # etag = response.headers.get("ETag")

    if not cache_path.exists():
        # Download to temporary file, then copy to cache dir once finished.
        # Otherwise you get corrupt cache entries if the download gets interrupted.

        # GET file object
        req = requests.get(url, stream=True, headers={"User-Agent": "fastNLP"})
        if req.status_code == 200:
            success = False
            fd, temp_filename = tempfile.mkstemp()
            uncompress_temp_dir = None
            try:
                content_length = req.headers.get("Content-Length")
                total = int(content_length) if content_length is not None else None
                progress = tqdm(unit="B", total=total, unit_scale=1)
                logger.info("%s not found in cache, downloading to %s"
                            % (url, temp_filename))

                with open(temp_filename, "wb") as temp_file:
                    for chunk in req.iter_content(chunk_size=1024 * 16):
                        if chunk:  # filter out keep-alive new chunks
                            progress.update(len(chunk))
                            temp_file.write(chunk)
                progress.close()
                logger.info(f"Finish download from {url}")

                # start decompressing
                if suffix in ('.zip', '.tar.gz', '.gz'):
                    uncompress_temp_dir = tempfile.mkdtemp()
                    logger.debug(f"Start to uncompress file to {uncompress_temp_dir}")
                    if suffix == '.zip':
                        unzip_file(Path(temp_filename), Path(uncompress_temp_dir))
                    elif suffix == '.gz':
                        ungzip_file(temp_filename, uncompress_temp_dir, dir_name)
                    else:
                        untar_gz_file(Path(temp_filename), Path(uncompress_temp_dir))
                    filenames = os.listdir(uncompress_temp_dir)
                    if len(filenames) == 1:
                        if os.path.isdir(os.path.join(uncompress_temp_dir, filenames[0])):
                            uncompress_temp_dir = os.path.join(uncompress_temp_dir,
                                                               filenames[0])

                    cache_path.mkdir(parents=True, exist_ok=True)
                    logger.debug("Finish un-compressing file.")
                else:
                    uncompress_temp_dir = temp_filename
                    cache_path = str(cache_path) + suffix

                # copy to the target location
                logger.info(f"Copy file to {cache_path}")
                if os.path.isdir(uncompress_temp_dir):
                    for filename in os.listdir(uncompress_temp_dir):
                        if os.path.isdir(os.path.join(uncompress_temp_dir, filename)):
                            shutil.copytree(os.path.join(uncompress_temp_dir, filename),
                                            cache_path / filename)
                        else:
                            shutil.copyfile(os.path.join(uncompress_temp_dir, filename),
                                            cache_path / filename)
                else:
                    shutil.copyfile(uncompress_temp_dir, cache_path)
                success = True
            except Exception as e:
                logger.error(e)
                raise e
            finally:
                if not success:
                    if cache_path.exists():
                        if cache_path.is_file():
                            os.remove(cache_path)
                        else:
                            shutil.rmtree(cache_path)
                os.close(fd)
                os.remove(temp_filename)
                if uncompress_temp_dir is None:
                    pass
                elif os.path.isdir(uncompress_temp_dir):
                    shutil.rmtree(uncompress_temp_dir)
                elif os.path.isfile(uncompress_temp_dir):
                    os.remove(uncompress_temp_dir)

            return get_filepath(cache_path)
        else:
            raise HTTPError(f"Status code:{req.status_code}. Fail to download from {url}.")
def wrapper(*args, **kwargs):
    kwargs.pop('client_options', None)
    try:
        return method(*args, **kwargs)
    except (CommunicationError, EncoderError) as e:
        raise HTTPError(e)
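# Hedged sketch of the enclosing decorator (its name is hypothetical):
# `wrapper` above reads as the inner function of a decorator that drops the
# unsupported `client_options` kwarg and converts client-level errors into
# HTTPError for the caller.
import functools

def translate_errors(method):
    @functools.wraps(method)
    def wrapper(*args, **kwargs):
        kwargs.pop('client_options', None)
        try:
            return method(*args, **kwargs)
        except (CommunicationError, EncoderError) as e:
            raise HTTPError(e)
    return wrapper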
def test_raises_non_500_http_error(self, mocked_regular_season_player_box_scores):
    mocked_regular_season_player_box_scores.side_effect = HTTPError(
        response=MagicMock(status_code=codes.bad_request)
    )
    self.assertRaises(HTTPError, regular_season_player_box_scores, 'Mock Player', 2000)
def raise_for_status(self):
    if self.status_code != 200:
        raise HTTPError("BAD")
    return self.status_code
def test_service_request_404(self):
    requests = Mock()
    requests.get.side_effect = HTTPError('Client Error: 404')
    with self.assertRaises(HTTPError):
        claim_service_request(requests, self.service_request, '0f00')
class TestGoogleDataprepHook(unittest.TestCase):
    def setUp(self):
        with mock.patch("airflow.hooks.base_hook.BaseHook.get_connection") as conn:
            conn.return_value.extra_dejson = EXTRA
            self.hook = dataprep.GoogleDataprepHook(dataprep_conn_id="dataprep_default")

    @patch("airflow.providers.google.cloud.hooks.dataprep.requests.get")
    def test_get_jobs_for_job_group_should_be_called_once_with_params(self, mock_get_request):
        self.hook.get_jobs_for_job_group(JOB_ID)
        mock_get_request.assert_called_once_with(
            f"{URL}/{JOB_ID}/jobs",
            headers={"Content-Type": "application/json", "Authorization": f"Bearer {TOKEN}"},
        )

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[HTTPError(), mock.MagicMock()],
    )
    def test_get_jobs_for_job_group_should_pass_after_retry(self, mock_get_request):
        self.hook.get_jobs_for_job_group(JOB_ID)
        assert mock_get_request.call_count == 2

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[mock.MagicMock(), HTTPError()],
    )
    def test_get_jobs_for_job_group_should_not_retry_after_success(self, mock_get_request):
        # pylint: disable=no-member
        self.hook.get_jobs_for_job_group.retry.sleep = mock.Mock()
        self.hook.get_jobs_for_job_group(JOB_ID)
        assert mock_get_request.call_count == 1

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[HTTPError(), HTTPError(), HTTPError(), HTTPError(), mock.MagicMock()],
    )
    def test_get_jobs_for_job_group_should_retry_after_four_errors(self, mock_get_request):
        # pylint: disable=no-member
        self.hook.get_jobs_for_job_group.retry.sleep = mock.Mock()
        self.hook.get_jobs_for_job_group(JOB_ID)
        assert mock_get_request.call_count == 5

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[HTTPError(), HTTPError(), HTTPError(), HTTPError(), HTTPError()],
    )
    def test_get_jobs_for_job_group_raise_error_after_five_calls(self, mock_get_request):
        with pytest.raises(RetryError) as err:
            # pylint: disable=no-member
            self.hook.get_jobs_for_job_group.retry.sleep = mock.Mock()
            self.hook.get_jobs_for_job_group(JOB_ID)
        assert "HTTPError" in str(err)
        assert mock_get_request.call_count == 5

    @patch("airflow.providers.google.cloud.hooks.dataprep.requests.get")
    def test_get_job_group_should_be_called_once_with_params(self, mock_get_request):
        self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED)
        mock_get_request.assert_called_once_with(
            f"{URL}/{JOB_ID}",
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {TOKEN}",
            },
            params={"embed": "", "includeDeleted": False},
        )

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[HTTPError(), mock.MagicMock()],
    )
    def test_get_job_group_should_pass_after_retry(self, mock_get_request):
        self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED)
        assert mock_get_request.call_count == 2

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[mock.MagicMock(), HTTPError()],
    )
    def test_get_job_group_should_not_retry_after_success(self, mock_get_request):
        self.hook.get_job_group.retry.sleep = mock.Mock()  # pylint: disable=no-member
        self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED)
        assert mock_get_request.call_count == 1

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError(),
            mock.MagicMock(),
        ],
    )
    def test_get_job_group_should_retry_after_four_errors(self, mock_get_request):
        self.hook.get_job_group.retry.sleep = mock.Mock()  # pylint: disable=no-member
        self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED)
        assert mock_get_request.call_count == 5

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.get",
        side_effect=[HTTPError(), HTTPError(), HTTPError(), HTTPError(), HTTPError()],
    )
    def test_get_job_group_raise_error_after_five_calls(self, mock_get_request):
        with pytest.raises(RetryError) as err:
            # pylint: disable=no-member
            self.hook.get_job_group.retry.sleep = mock.Mock()
            self.hook.get_job_group(JOB_ID, EMBED, INCLUDE_DELETED)
        assert "HTTPError" in str(err)
        assert mock_get_request.call_count == 5

    @patch("airflow.providers.google.cloud.hooks.dataprep.requests.post")
    def test_run_job_group_should_be_called_once_with_params(self, mock_get_request):
        data = '"{\\"wrangledDataset\\": {\\"id\\": 1234567}}"'
        self.hook.run_job_group(body_request=DATA)
        mock_get_request.assert_called_once_with(
            f"{URL}",
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {TOKEN}",
            },
            data=data,
        )

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.post",
        side_effect=[HTTPError(), mock.MagicMock()],
    )
    def test_run_job_group_should_pass_after_retry(self, mock_get_request):
        self.hook.run_job_group(body_request=DATA)
        assert mock_get_request.call_count == 2

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.post",
        side_effect=[mock.MagicMock(), HTTPError()],
    )
    def test_run_job_group_should_not_retry_after_success(self, mock_get_request):
        self.hook.run_job_group.retry.sleep = mock.Mock()  # pylint: disable=no-member
        self.hook.run_job_group(body_request=DATA)
        assert mock_get_request.call_count == 1

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.post",
        side_effect=[
            HTTPError(),
            HTTPError(),
            HTTPError(),
            HTTPError(),
            mock.MagicMock(),
        ],
    )
    def test_run_job_group_should_retry_after_four_errors(self, mock_get_request):
        self.hook.run_job_group.retry.sleep = mock.Mock()  # pylint: disable=no-member
        self.hook.run_job_group(body_request=DATA)
        assert mock_get_request.call_count == 5

    @patch(
        "airflow.providers.google.cloud.hooks.dataprep.requests.post",
        side_effect=[HTTPError(), HTTPError(), HTTPError(), HTTPError(), HTTPError()],
    )
    def test_run_job_group_raise_error_after_five_calls(self, mock_get_request):
        with pytest.raises(RetryError) as err:
            # pylint: disable=no-member
            self.hook.run_job_group.retry.sleep = mock.Mock()
            self.hook.run_job_group(body_request=DATA)
        assert "HTTPError" in str(err)
        assert mock_get_request.call_count == 5
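# Hedged sketch (decorator arguments are assumptions): the `.retry.sleep`
# attribute the tests above patch is what tenacity exposes on a
# @retry-decorated callable, which is consistent with the five-attempt
# behaviour and the RetryError the tests assert.
from tenacity import retry, stop_after_attempt

@retry(stop=stop_after_attempt(5))
def fetch_job_group(job_id):  # illustrative stand-in, not the real hook method
    ...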
def raise_for_status(self):
    if not 200 <= self.status_code < 300:
        raise HTTPError(f'{self.status_code}: Boom!')
def test_raises_exception_for_500_response(self, mocked_http_client):
    mocked_http_client.regular_season_player_box_scores.side_effect = HTTPError(
        response=mock.Mock(status_code=codes.internal_server_error)
    )
    self.assertRaises(InvalidPlayerAndSeason, regular_season_player_box_scores,
                      'Mock Player', 2000)
def test_create_version_raises_http_error(self, mock_exists, mock_response):
    mock_response.side_effect = HTTPError(Mock(status=500), 'Error')
    self.assertRaises(HTTPError, self.client.create_version, 'mock_version', 1)
def http_error_with_url():
    raise HTTPError("unable to process page%20url")
def stage_data(self, uids):
    """
    Stage ALMA data

    Parameters
    ----------
    uids : list or str
        A list of valid UIDs or a single UID.
        UIDs should have the form: 'uid://A002/X391d0b/X7b'

    Returns
    -------
    data_file_table : Table
        A table containing 3 columns: the UID, the file URL (for future
        downloading), and the file size
    """

    """
    With log.set_level(10)
    INFO: Staging files... [astroquery.alma.core]
    DEBUG: First request URL: https://almascience.eso.org/rh/submission [astroquery.alma.core]
    DEBUG: First request payload: {'dataset': [u'ALMA+uid___A002_X3b3400_X90f']} [astroquery.alma.core]
    DEBUG: First response URL: https://almascience.eso.org/rh/checkAuthenticationStatus/3f98de33-197e-4692-9afa-496842032ea9/submission [astroquery.alma.core]
    DEBUG: Request ID: 3f98de33-197e-4692-9afa-496842032ea9 [astroquery.alma.core]
    DEBUG: Submission URL: https://almascience.eso.org/rh/submission/3f98de33-197e-4692-9afa-496842032ea9 [astroquery.alma.core]
    .DEBUG: Data list URL: https://almascience.eso.org/rh/requests/anonymous/786823226 [astroquery.alma.core]
    """

    if isinstance(uids, six.string_types + (np.bytes_, )):
        uids = [uids]
    if not isinstance(uids, (list, tuple, np.ndarray)):
        raise TypeError("Datasets must be given as a list of strings.")

    log.info("Staging files...")

    self._get_dataarchive_url()

    url = urljoin(self._get_dataarchive_url(), 'rh/submission')
    log.debug("First request URL: {0}".format(url))
    # 'ALMA+uid___A002_X391d0b_X7b'
    payload = {'dataset': ['ALMA+' + clean_uid(uid) for uid in uids]}
    log.debug("First request payload: {0}".format(payload))

    self._staging_log = {'first_post_url': url}

    # Request staging for the UIDs
    # This component cannot be cached, since the returned data can change
    # if new data are uploaded
    response = self._request('POST', url, data=payload,
                             timeout=self.TIMEOUT, cache=False)
    self._staging_log['initial_response'] = response
    log.debug("First response URL: {0}".format(response.url))

    if 'login' in response.url:
        raise ValueError("You must login before downloading this data set.")

    if response.status_code == 405:
        if hasattr(self, '_last_successful_staging_log'):
            log.warning("Error 405 received.  If you have previously staged "
                        "the same UIDs, the result returned is probably "
                        "correct, otherwise you may need to create a fresh "
                        "astroquery.Alma instance.")
            return self._last_successful_staging_log['result']
        else:
            raise HTTPError("Received an error 405: this may indicate you "
                            "have already staged the data.  Try downloading "
                            "the file URLs directly with download_files.")
    response.raise_for_status()

    if 'j_spring_cas_security_check' in response.url:
        time.sleep(1)
        # CANNOT cache this stage: it is not a real data page! results in
        # infinite loops
        response = self._request('POST', url, data=payload,
                                 timeout=self.TIMEOUT, cache=False)
        self._staging_log['initial_response'] = response
        if 'j_spring_cas_security_check' in response.url:
            log.warning("Staging request was not successful.  Try again?")
        response.raise_for_status()

    if 'j_spring_cas_security_check' in response.url:
        raise RemoteServiceError("Could not access data.  This error "
                                 "can arise if the data are private and "
                                 "you do not have access rights or are "
                                 "not logged in.")

    request_id = response.url.split("/")[-2]
    self._staging_log['request_id'] = request_id
    log.debug("Request ID: {0}".format(request_id))

    # Submit a request for the specific request ID identified above
    submission_url = urljoin(self._get_dataarchive_url(),
                             url_helpers.join('rh/submission', request_id))
    log.debug("Submission URL: {0}".format(submission_url))
    self._staging_log['submission_url'] = submission_url
    staging_submission = self._request('GET', submission_url, cache=True)
    self._staging_log['staging_submission'] = staging_submission
    staging_submission.raise_for_status()

    data_page_url = staging_submission.url
    self._staging_log['data_page_url'] = data_page_url
    dpid = data_page_url.split("/")[-1]
    self._staging_log['staging_page_id'] = dpid

    # CANNOT cache this step: please_wait will happen infinitely
    data_page = self._request('GET', data_page_url, cache=False)
    self._staging_log['data_page'] = data_page
    data_page.raise_for_status()

    has_completed = False
    while not has_completed:
        time.sleep(1)
        summary = self._request('GET', url_helpers.join(data_page_url, 'summary'),
                                cache=False)
        summary.raise_for_status()
        print(".", end='')
        sys.stdout.flush()
        has_completed = summary.json()['complete']

    self._staging_log['summary'] = summary
    summary.raise_for_status()
    self._staging_log['json_data'] = json_data = summary.json()

    username = self.USERNAME if self.USERNAME else 'anonymous'

    # templates:
    # https://almascience.eso.org/dataPortal/requests/keflavich/946895898/ALMA/
    # 2013.1.00308.S_uid___A001_X196_X93_001_of_001.tar/2013.1.00308.S_uid___A001_X196_X93_001_of_001.tar
    # uid___A002_X9ee74a_X26f0/2013.1.00308.S_uid___A002_X9ee74a_X26f0.asdm.sdm.tar

    url_decomposed = urlparse(data_page_url)
    base_url = ('{uri.scheme}://{uri.netloc}/'
                'dataPortal/requests/{username}/'
                '{staging_page_id}/ALMA'.format(uri=url_decomposed,
                                                staging_page_id=dpid,
                                                username=username,
                                                ))
    tbl = self._json_summary_to_table(json_data, base_url=base_url)
    self._staging_log['result'] = tbl
    self._staging_log['file_urls'] = tbl['URL']
    self._last_successful_staging_log = self._staging_log

    return tbl
class TestAPIRequest:
    @pytest.mark.parametrize(
        "setting", ["h_client_id", "h_client_secret", "h_authority", "h_api_url"])
    def test_it_crashes_if_a_required_setting_is_missing(self, pyramid_request, setting):
        del pyramid_request.registry.settings[setting]

        with pytest.raises(KeyError, match=setting):
            HypothesisAPIService(None, pyramid_request)

    @pytest.mark.parametrize("verb", ["DELETE", "GET", "PATCH", "POST", "PUT"])
    def test_it_sends_requests_to_the_h_api(self, pyramid_request, requests, svc, verb):
        # Retrieve the method to call, e.g. HypothesisAPIService.delete() or .get().
        method = getattr(svc, verb.lower())

        method("path")

        requests.request.assert_called_once_with(
            method=verb,
            url="https://example.com/api/path",
            auth=("TEST_CLIENT_ID", "TEST_CLIENT_SECRET"),
            timeout=10,
        )

    def test_it_strips_leading_slashes_from_the_path(self, pyramid_request, requests, svc):
        svc.request("POST", "/path")

        assert requests.request.call_args[1]["url"] == "https://example.com/api/path"

    # Instead of calling get() or post() etc you can also call request()
    # directly and pass in the HTTP verb as a string.
    def test_you_can_also_call_request_directly(self, pyramid_request, requests, svc):
        svc.request("PUT", "path")

        requests.request.assert_called_once_with(
            method="PUT",
            url="https://example.com/api/path",
            auth=("TEST_CLIENT_ID", "TEST_CLIENT_SECRET"),
            timeout=10,
        )

    def test_it_sends_data_as_json(self, pyramid_request, requests, svc):
        svc.post("path", data={"foo": "bar"})

        assert requests.request.call_args[1]["data"] == '{"foo": "bar"}'

    def test_it_sends_userid_as_x_forwarded_user(self, pyramid_request, requests, svc):
        svc.post("path", userid="acct:seanh@TEST_AUTHORITY")

        assert requests.request.call_args[1]["headers"]["X-Forwarded-User"] == (
            "acct:seanh@TEST_AUTHORITY")

    @pytest.mark.parametrize(
        "exception",
        [ConnectionError(), HTTPError(), ReadTimeout(), TooManyRedirects()])
    def test_it_raises_HAPIError_if_the_request_fails(self, exception, pyramid_request,
                                                      requests, svc):
        requests.request.side_effect = exception

        with pytest.raises(HAPIError):
            svc.post("path")

    def test_it_raises_HAPINotFoundError_if_it_receives_a_404_response(
            self, pyramid_request, requests, svc):
        requests.request.return_value.status_code = 404
        requests.request.return_value.raise_for_status.side_effect = HTTPError(
            response=requests.request.return_value)

        with pytest.raises(HAPINotFoundError) as exc_info:
            svc.post("path")

        assert (
            exc_info.value.response == requests.request.return_value
        ), "It passes the h API response to HAPIError so that it gets logged"

    def test_it_raises_HAPIError_if_it_receives_an_error_response(
            self, pyramid_request, requests, svc):
        requests.request.return_value.raise_for_status.side_effect = HTTPError(
            response=requests.request.return_value)

        with pytest.raises(HAPIError) as exc_info:
            svc.post("path")

        assert (
            exc_info.value.response == requests.request.return_value
        ), "It passes the h API response to HAPIError so that it gets logged"

    def test_you_can_tell_it_not_to_raise_for_certain_error_statuses(
            self, pyramid_request, requests, svc):
        response = Response()
        response.status_code = requests.request.return_value.status_code = 409
        requests.request.return_value.raise_for_status.side_effect = HTTPError(
            response=response)

        svc.post("path", statuses=[409])

    @pytest.fixture
    def context(self):
        context = mock.create_autospec(
            LTILaunch,
            spec_set=True,
            instance=True,
            h_userid="acct:seanh@TEST_AUTHORITY",
        )
        return context

    @pytest.fixture(autouse=True)
    def requests(self, patch):
        return patch("lms.services.hapi.requests")

    @pytest.fixture
    def svc(self, context, pyramid_request):
        return HypothesisAPIService(context, pyramid_request)