def test_delete_unauthorized_without_json(session: Session):
    """A 401 on DELETE with no JSON body should surface as Unauthorized."""
    with requests_mock.Mocker() as mocked_http:
        mocked_http.delete(
            'http://citrine-testing.fake/api/v1/bar/something',
            status_code=401,
        )
        with pytest.raises(Unauthorized):
            session.delete_resource('/bar/something')
def test_status_code_401(self, mock_request, _):
    """A 401 response raises NonRetryableException and its Unauthorized subtype."""
    fake_response = mock.Mock()
    fake_response.status_code = 401
    mock_request.return_value = fake_response
    # Issue the same request twice so both exception types can be asserted.
    for expected_error in (NonRetryableException, Unauthorized):
        with pytest.raises(expected_error):
            Session().checked_request('method', 'path')
def test_status_code_425(self, mock_request, _):
    """A 425 response raises RetryableException and its WorkflowNotReadyException subtype."""
    fake_response = mock.Mock()
    fake_response.status_code = 425
    mock_request.return_value = fake_response
    # Issue the same request twice so both exception types can be asserted.
    for expected_error in (RetryableException, WorkflowNotReadyException):
        with pytest.raises(expected_error):
            Session().checked_request('method', 'path')
def test_get_refresh_token_failure(session: Session):
    """If the refresh endpoint returns 401, UnauthorizedRefreshToken is raised."""
    # Force the session to consider its access token already expired.
    session.access_token_expiration = datetime.utcnow() - timedelta(minutes=1)
    with requests_mock.Mocker() as mocked_http:
        mocked_http.post(
            'http://citrine-testing.fake/api/v1/tokens/refresh',
            status_code=401,
        )
        with pytest.raises(UnauthorizedRefreshToken):
            session.get_resource('/foo')
def test_status_code_409(self, mock_request, _):
    """A 409 response raises NonRetryableException and its WorkflowConflictException subtype."""
    fake_response = mock.Mock()
    fake_response.status_code = 409
    mock_request.return_value = fake_response
    # Issue the same request twice so both exception types can be asserted.
    for expected_error in (NonRetryableException, WorkflowConflictException):
        with pytest.raises(expected_error):
            Session().checked_request('method', 'path')
def test_cursor_paged_resource():
    """Page size must not change the aggregated result of cursor pagination."""
    expected = list(range(26))
    fake_request = make_fake_cursor_request_function(expected)
    # Page sizes below, equal to, and above the size of the full result set.
    for page_size in (10, 26, 40):
        fetched = list(Session.cursor_paged_resource(
            fake_request, 'foo', forward=True, per_page=page_size))
        assert fetched == expected
def test_failed_put_with_stacktrace(session: Session):
    """A 500 carrying a debug stacktrace is re-raised with that payload as the message."""
    with requests_mock.Mocker() as mocked_http:
        mocked_http.put(
            'http://citrine-testing.fake/api/v1/bad-endpoint',
            status_code=500,
            json={'debug_stacktrace': 'blew up!'},
        )
        with pytest.raises(Exception) as excinfo:
            session.put_resource('/bad-endpoint', json={})
        assert '{"debug_stacktrace": "blew up!"}' == str(excinfo.value)
def session():
    """Build a Session against the fake test host with a not-yet-expired token."""
    fixture = Session(refresh_token='12345', scheme='http', host='citrine-testing.fake')
    # Default behavior is to *not* require a refresh - tests that need one clear this out.
    # Rule of thumb: freezegun or similar should replace reliance on the system clock,
    # but this is light enough to postpone that for the time being.
    fixture.access_token_expiration = datetime.utcnow() + timedelta(minutes=3)
    return fixture
def test_get_refreshes_token(session: Session):
    """An expired access token triggers a refresh before the GET is issued."""
    session.access_token_expiration = datetime.utcnow() - timedelta(minutes=1)
    refresh_payload = refresh_token(datetime(2019, 3, 14, tzinfo=pytz.utc))
    with requests_mock.Mocker() as mocked_http:
        mocked_http.post('http://citrine-testing.fake/api/v1/tokens/refresh',
                         json=refresh_payload)
        mocked_http.get('http://citrine-testing.fake/api/v1/foo', json={'foo': 'bar'})
        fetched = session.get_resource('/foo')
        assert fetched == {'foo': 'bar'}
        # The expiration recorded on the session comes from the refresh response.
        assert session.access_token_expiration == datetime(2019, 3, 14)
def test_status_code_404(self, mock_request, _):
    """A 404 response is treated as non-retryable."""
    fake_response = mock.Mock()
    fake_response.status_code = 404
    fake_response.text = 'Some response text'
    mock_request.return_value = fake_response
    with pytest.raises(NonRetryableException):
        Session().checked_request('method', 'path')
def __init__(self, api_key: str, scheme: str = DEFAULT_SCHEME,
             host: str = DEFAULT_HOST, port: Optional[str] = None):
    """Create a client wrapper: a backing Session plus a module-scoped logger."""
    # Session is constructed from the connection parameters; the logger is
    # namespaced by this module's import path.
    self.session: Session = Session(api_key, scheme, host, port)
    self.logger = logging.getLogger(__name__)
def __init__(self, name: str, description: Optional[str] = None,
             session: Optional[Session] = None):
    """
    Initialize with a name, an optional description, and an optional session.

    Fix: the previous default ``session: Session = Session()`` was a mutable
    default argument — one Session was built at import time and silently shared
    by every instance created without an explicit session. ``None`` now means
    "build a fresh Session for this instance".
    """
    self.name: str = name
    self.description: Optional[str] = description
    self.session: Session = session if session is not None else Session()
def __init__(self, name: str, description: str, dimensions: List[Dimension],
             session: Session = None):
    """
    Initialize with name, description, dimensions, and an optional session.

    Fix: the previous default ``session: Session = Session()`` was a mutable
    default argument — one Session was built at import time and shared by all
    instances. ``None`` now means "build a fresh Session for this instance".
    """
    self.name: str = name
    self.description: str = description
    self.dimensions: List[Dimension] = dimensions
    self.session: Session = session if session is not None else Session()
def __init__(self, name: str, analysis: CrossValidationAnalysisConfiguration,
             project_id: Optional[UUID] = None,
             session: Optional[Session] = None):
    """
    Initialize with a name, an analysis configuration, and optional project/session.

    Fix: the previous default ``session: Session = Session()`` was a mutable
    default argument — one Session was built at import time and shared by all
    instances. ``None`` now means "build a fresh Session for this instance".
    """
    self.name = name
    self.analysis = analysis
    self.project_id = project_id
    self.session = session if session is not None else Session()
def test_good_json_response(session: Session):
    """put_resource returns the JSON body of a successful response unchanged."""
    expected_json = {"bar": "something"}
    with requests_mock.Mocker() as mocked_http:
        mocked_http.put('http://citrine-testing.fake/api/v1/bar/something',
                        status_code=200, json=expected_json)
        returned_json = session.put_resource('bar/something', {"ignored": "true"})
        assert returned_json == expected_json
def __init__(self, name: str, description: str, descriptors: List[Descriptor],
             data: List[Mapping[str, Any]], session: Session = None):
    """
    Initialize with name, description, descriptors, row data, and an optional session.

    Fix: the previous default ``session: Session = Session()`` was a mutable
    default argument — one Session was built at import time and shared by all
    instances. ``None`` now means "build a fresh Session for this instance".
    """
    self.name: str = name
    self.description: str = description
    self.descriptors: List[Descriptor] = descriptors
    self.data: List[Mapping[str, Any]] = data
    self.session: Session = session if session is not None else Session()
def __init__(self, *, name: str, description: str,
             subspaces: Optional[List[Union[UUID, DesignSpace]]] = None,
             dimensions: Optional[List[Dimension]] = None,
             session: Optional[Session] = None):
    """
    Initialize with name, description, optional subspaces/dimensions, and a session.

    Fix: the previous default ``session: Session = Session()`` was a mutable
    default argument — one Session was built at import time and shared by all
    instances. ``None`` now means "build a fresh Session for this instance",
    matching the existing ``None``-defaults used for subspaces and dimensions.
    """
    self.name: str = name
    self.description: str = description
    # ``or []`` keeps the stored lists non-None, as before.
    self.subspaces: List[Union[UUID, DesignSpace]] = subspaces or []
    self.dimensions: List[Dimension] = dimensions or []
    self.session: Session = session if session is not None else Session()
def __init__(self, name: str, analysis: CrossValidationAnalysisConfiguration,
             project_id: Optional[UUID] = None,
             session: Optional[Session] = None):
    """
    Initialize the deprecated workflow wrapper, emitting a DeprecationWarning.

    Fixes:
    * ``self.__class__.name`` did not name the class (``__name__`` does); the
      deprecation message now interpolates the actual class name.
    * The previous default ``session: Session = Session()`` was a mutable
      default argument — one Session was built at import time and shared by
      all instances. ``None`` now means "build a fresh Session".
    """
    warn("{this_class} is deprecated. Please use {replacement} instead"
         .format(this_class=self.__class__.__name__,
                 replacement=PredictorEvaluationWorkflow.__name__),
         category=DeprecationWarning)
    self.name = name
    self.analysis = analysis
    self.project_id = project_id
    self.session = session if session is not None else Session()
def __init__(self, name: str, design_space_id: UUID,
             processor_id: Optional[UUID], predictor_id: UUID,
             project_id: Optional[UUID] = None,
             session: Optional[Session] = None):
    """
    Initialize with the module IDs the workflow references plus an optional session.

    Fix: the previous default ``session: Session = Session()`` was a mutable
    default argument — one Session was built at import time and shared by all
    instances. ``None`` now means "build a fresh Session for this instance".
    """
    self.name = name
    self.design_space_id = design_space_id
    self.processor_id = processor_id
    self.predictor_id = predictor_id
    self.project_id = project_id
    self.session = session if session is not None else Session()
def test_post_refreshes_token_when_denied(session: Session):
    """A 401 invalid-token on POST triggers a refresh and a transparent retry."""
    refresh_payload = refresh_token(datetime(2019, 3, 14, tzinfo=pytz.utc))
    with requests_mock.Mocker() as mocked_http:
        mocked_http.post('http://citrine-testing.fake/api/v1/tokens/refresh',
                         json=refresh_payload)
        # First POST is rejected with an invalid-token 401; the retry succeeds.
        mocked_http.register_uri('POST', 'http://citrine-testing.fake/api/v1/foo', [
            {'status_code': 401, 'json': {'reason': 'invalid-token'}},
            {'json': {'foo': 'bar'}},
        ])
        response = session.post_resource('/foo', json={'data': 'hi'})
        assert response == {'foo': 'bar'}
        assert session.access_token_expiration == datetime(2019, 3, 14)
def __init__(self, *, name: str, description: str,
             formulation_descriptor: FormulationDescriptor,
             ingredients: Set[str], constraints: Set[Constraint],
             labels: Optional[Mapping[str, Set[str]]] = None,
             resolution: float = 0.0001,
             session: Optional[Session] = None):
    """
    Initialize a formulation design-space definition.

    Fix: the previous default ``session: Session = Session()`` was a mutable
    default argument — one Session was built at import time and shared by all
    instances. ``None`` now means "build a fresh Session for this instance",
    matching the existing ``None`` default used for labels.
    """
    self.name: str = name
    self.description: str = description
    self.formulation_descriptor: FormulationDescriptor = formulation_descriptor
    self.ingredients: Set[str] = ingredients
    self.constraints: Set[Constraint] = constraints
    self.labels: Optional[Mapping[str, Set[str]]] = labels
    self.resolution: float = resolution
    self.session: Session = session if session is not None else Session()
def test_connection_error(self, mock_request, _):
    """A stale-connection ConnectionError on the first attempt is retried transparently."""
    payload = {'stuff': 'not_used'}
    attempts = []

    # Simulate a request using a stale session: the first call raises a
    # ConnectionError, the second call succeeds.
    def flaky_request(method, uri):
        if not attempts:
            attempts.append(1)
            raise requests.exceptions.ConnectionError
        return payload

    mock_request.side_effect = flaky_request
    result = Session()._request_with_retry('method', 'path')
    assert result == payload
def test_status_code_400(self, mock_request, _):
    """A 400 payload is parsed into BadRequest with structured validation errors."""
    error_payload = {
        'code': 400,
        'message': 'a message',
        'validation_errors': [
            {'failure_message': 'you have failed'},
        ],
    }
    fake_response = mock.Mock()
    fake_response.status_code = 400
    fake_response.json = lambda: error_payload
    fake_response.text = json.dumps(error_payload)
    mock_request.return_value = fake_response
    with pytest.raises(BadRequest) as excinfo:
        Session().checked_request('method', 'path')
    # The structured ApiError on the exception mirrors the response body.
    assert (excinfo.value.api_error.validation_errors[0].failure_message
            == error_payload['validation_errors'][0]['failure_message'])
def __init__(self, session: Session = None):
    """
    Initialize with an optional Citrine session.

    Fix: the previous default ``session: Session = Session()`` was a mutable
    default argument — one Session was built at import time and shared by all
    instances. ``None`` now means "build a fresh Session for this instance".
    """
    self.session = session if session is not None else Session()
def _async_gemd_batch_delete(
        id_list: List[Union[LinkByUID, UUID, str, BaseEntity]],
        project_id: UUID,
        session: Session,
        dataset_id: Optional[UUID],
        timeout: float = 2 * 60,
        polling_delay: float = 1.0) -> List[Tuple[LinkByUID, ApiError]]:
    """
    Shared implementation of Async GEMD Batch deletion.

    See documentation for _gemd_batch_delete. The only difference is that
    this version polls for an asynchronous result and can tolerate a very
    long runtime that the synchronous version cannot. Because this version
    can tolerate a long runtime, this versions allows for the removal of
    attribute templates.

    Parameters
    ----------
    id_list: List[Union[LinkByUID, UUID, str, BaseEntity]]
        A list of the IDs of data objects to be removed. They can be passed
        as a LinkByUID tuple, a UUID, a string, or the object itself. A UUID
        or string is assumed to be a Citrine ID, whereas a LinkByUID or
        BaseEntity can also be used to provide an external ID.

    project_id: UUID
        The Project ID to use in the delete request.

    session: Session
        The Citrine session.

    dataset_id: Optional[UUID] = None
        An optional dataset ID, which if provided will mandate that all
        GEMD objects must be within the given dataset.

    timeout: float
        Amount of time to wait on the job (in seconds) before giving up.
        Defaults to 2 minutes. Note that this number has no effect on the
        underlying job itself, which can also time out server-side.

    polling_delay: float
        How long to delay between each polling retry attempt.

    Returns
    -------
    List[Tuple[LinkByUID, ApiError]]
        A list of (LinkByUID, api_error) for each failure to delete an
        object. Note that this method doesn't raise an exception if an
        object fails to be deleted.

    """
    scoped_uids = []
    for uid in id_list:
        # And now normalize to id/scope pairs
        if isinstance(uid, BaseEntity):
            # Extract a (scope, id) link from the entity using the Citrine scope.
            link_by_uid = LinkByUID.from_entity(uid, CITRINE_SCOPE)
            scoped_uids.append({'scope': link_by_uid.scope,
                                'id': link_by_uid.id})
        elif isinstance(uid, LinkByUID):
            scoped_uids.append({'scope': uid.scope, 'id': uid.id})
        elif isinstance(uid, UUID):
            # A bare UUID is assumed to be a Citrine ID.
            scoped_uids.append({'scope': 'id', 'id': uid})
        elif isinstance(uid, str):
            # Strings must parse as UUIDs; anything else is a caller error.
            try:
                scoped_uids.append({'scope': 'id', 'id': UUID(uid)})
            except ValueError:
                raise TypeError("{} does not look like a UUID".format(uid))
        else:
            raise TypeError(
                "id_list must contain only LinkByUIDs, UUIDs, strings, or BaseEntities"
            )

    body = {'ids': scoped_uids}
    if dataset_id is not None:
        # Constrain the delete to a single dataset when one was provided.
        body.update({'dataset_id': str(dataset_id)})

    path = '/projects/{project_id}/gemd/async-batch-delete'.format(
        **{"project_id": project_id})

    # Kick off the asynchronous delete job, then poll until it resolves.
    response = session.post_resource(path, body)
    job_id = response["job_id"]

    return _poll_for_async_batch_delete_result(project_id, session, job_id,
                                               timeout, polling_delay)
def __init__(self, project_id: UUID, session: Session = None):
    """
    Initialize with the owning project's ID and an optional Citrine session.

    Fix: the previous default ``session: Session = Session()`` was a mutable
    default argument — one Session was built at import time and shared by all
    instances. ``None`` now means "build a fresh Session for this instance".
    """
    self.project_id = project_id
    self.session: Session = session if session is not None else Session()
def test_get_no_refresh(session: Session):
    """A GET with a still-valid token returns the JSON body directly."""
    with requests_mock.Mocker() as mocked_http:
        mocked_http.get('http://citrine-testing.fake/api/v1/foo',
                        json={'foo': 'bar'})
        fetched = session.get_resource('/foo')
        assert fetched == {'foo': 'bar'}
def test_get_not_found(session: Session):
    """A 404 on GET surfaces as NotFound."""
    with requests_mock.Mocker() as mocked_http:
        mocked_http.get('http://citrine-testing.fake/api/v1/foo',
                        status_code=404)
        with pytest.raises(NotFound):
            session.get_resource('/foo')
def __init__(self, api_key: str, scheme: str = DEFAULT_SCHEME,
             host: str = DEFAULT_HOST, port: Optional[str] = None):
    """Create the client's backing Session from the given connection parameters."""
    self.session: Session = Session(api_key, scheme, host, port)
def _poll_for_job_completion(session: Session, project_id: Union[UUID, str],
                             job: Union[JobSubmissionResponse, UUID, str], *,
                             timeout: float = 2 * 60,
                             polling_delay: float = 2.0) -> JobStatusResponse:
    """
    Polls for job completion given a timeout, failing with an exception on job failure.

    This polls for job completion given the Job ID, failing appropriately if the
    job result was not successful.

    Parameters
    ----------
    session
        The Citrine session used to issue the status requests.
    project_id
        The project under which the job's status is polled.
    job
        The job submission object or job ID that was given from a job submission.
    timeout
        Amount of time to wait on the job (in seconds) before giving up. Defaults
        to 2 minutes. Note that this number has no effect on the underlying job
        itself, which can also time out server-side.
    polling_delay:
        How long to delay between each polling retry attempt.

    Returns
    -------
    JobStatusResponse
        The job response information that can be used to extract relevant
        information from the completed job.

    """
    # Accept either a submission response or a raw job ID.
    if isinstance(job, JobSubmissionResponse):
        job_id = job.job_id
    else:
        job_id = job  # pragma: no cover
    path = 'projects/{}/execution/job-status'.format(project_id)
    params = {'job_id': job_id}
    start_time = time()
    while True:
        response = session.get_resource(path=path, params=params)
        status: JobStatusResponse = JobStatusResponse.build(response)
        # 'Success' and 'Failure' are terminal states; anything else keeps polling
        # until the caller-supplied timeout elapses.
        if status.status in ['Success', 'Failure']:
            break
        elif time() - start_time < timeout:
            logger.info(
                'Job still in progress, polling status again in {:.2f} seconds.'
                .format(polling_delay))
            sleep(polling_delay)
        else:
            logger.error(
                'Job exceeded user timeout of {} seconds.'.format(timeout))
            logger.debug('Last status: {}'.format(status.dump()))
            raise PollingTimeoutError('Job {} timed out.'.format(job_id))
    if status.status == 'Failure':
        logger.debug('Job terminated with Failure status: {}'.format(
            status.dump()))
        # Collect the failure reason of every failed task for the raised error.
        failure_reasons = []
        for task in status.tasks:
            if task.status == 'Failure':
                logger.error('Task {} failed with reason "{}"'.format(
                    task.id, task.failure_reason))
                failure_reasons.append(task.failure_reason)
        raise JobFailureError(
            message='Job {} terminated with Failure status. Failure reasons: {}'
            .format(job_id, failure_reasons), job_id=job_id,
            failure_reasons=failure_reasons)
    return status