def test_init_preexisting_table(self):
    # A 400 from the RAW "create" calls means the database/table already exists; initializing
    # the state store should still attempt both calls and must not raise.
    self.client.raw.databases.create = Mock(side_effect=CogniteAPIError("", code=400))
    self.client.raw.tables.create = Mock(side_effect=CogniteAPIError("", code=400))

    state_store = RawStateStore(cdf_client=self.client, database=self.database, table=self.table)

    self.client.raw.databases.create.assert_called_once_with(self.database)
    self.client.raw.tables.create.assert_called_once_with(self.database, self.table)
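# A minimal sketch (not the library's actual code) of the behaviour the test above asserts:
# RawStateStore ensures its RAW destination exists at init time and treats the 400
# "already exists" error as success. The helper name `_ensure_raw_destination` is hypothetical.
def _ensure_raw_destination(cdf_client, database: str, table: str) -> None:
    try:
        cdf_client.raw.databases.create(database)
    except CogniteAPIError as exc:
        if exc.code != 400:  # 400 here means the database already exists
            raise
    try:
        cdf_client.raw.tables.create(database, table)
    except CogniteAPIError as exc:
        if exc.code != 400:  # likewise: the table already exists
            raise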
def _raise_API_error(res: Response, payload: Dict):
    x_request_id = res.headers.get("X-Request-Id")
    code = res.status_code
    missing = None
    duplicated = None
    extra = {}
    try:
        error = res.json()["error"]
        if isinstance(error, str):
            msg = error
        elif isinstance(error, dict):
            msg = error["message"]
            missing = error.get("missing")
            duplicated = error.get("duplicated")
            # Preserve any remaining fields of the error object as "extra"
            for k, v in error.items():
                if k not in ["message", "missing", "duplicated", "code"]:
                    extra[k] = v
        else:
            msg = res.content
    except Exception:
        msg = res.content

    error_details = {"X-Request-ID": x_request_id}
    if payload:
        error_details["payload"] = payload
    if missing:
        error_details["missing"] = missing
    if duplicated:
        error_details["duplicated"] = duplicated
    error_details["headers"] = res.request.headers.copy()
    APIClient._sanitize_headers(error_details["headers"])

    log.debug("HTTP Error {} {} {}: {}".format(code, res.request.method, res.request.url, msg), extra=error_details)
    raise CogniteAPIError(msg, code, x_request_id, missing=missing, duplicated=duplicated, extra=extra)
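# Illustrative only (assumes a configured CogniteClient `client` and the module-level `log`):
# any SDK call that receives a non-2xx response ends up in _raise_API_error, so callers see the
# parsed fields as attributes on the raised CogniteAPIError.
try:
    client.raw.tables.create("my_db", "my_table")
except CogniteAPIError as exc:
    log.warning("Request %s failed with %s: %s", exc.x_request_id, exc.code, exc.message)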
def upload_zipped_code_to_files(client, file_bytes: bytes, name: str, ds: DataSet) -> FileMetadata:
    try:
        return client.files.upload_bytes(
            file_bytes,
            name=name,
            external_id=name,
            data_set_id=ds.id,
            overwrite=True,
        )
    except CogniteAPIError as exc:
        if ds.id is None:
            # Error is not dataset related, so we immediately re-raise
            raise
        if ds.write_protected:
            err_msg = (
                "Unable to upload file to WRITE-PROTECTED dataset: Deployment key MUST have capability "
                "'dataset:OWNER' (and have 'files:WRITE' scoped to the same dataset OR all files)."
            )
        else:
            err_msg = (
                "Unable to upload file to dataset: Deployment key must have capability "
                "'files:WRITE' scoped to the same dataset OR all files."
            )
        logger.error(err_msg)
        raise CogniteAPIError(err_msg, exc.code, exc.x_request_id) from None
def test_api_error(self):
    e = CogniteAPIError(
        message="bla",
        code=200,
        x_request_id="abc",
        missing=[{"id": 123}],
        duplicated=[{"externalId": "abc"}],
        successful=["bla"],
    )
    assert "bla" == e.message
    assert 200 == e.code
    assert "abc" == e.x_request_id
    assert [{"id": 123}] == e.missing
    assert [{"externalId": "abc"}] == e.duplicated
    assert "bla" in str(e)
def collect_exc_info_and_raise(
    exceptions: List[Exception],
    successful: Optional[List] = None,
    failed: Optional[List] = None,
    unknown: Optional[List] = None,
    unwrap_fn: Optional[Callable] = None,
):
    missing = []
    duplicated = []
    missing_exc = None
    dup_exc = None
    unknown_exc = None
    # Classify each exception: 400/422 with "missing" -> not found, 409 with "duplicated"
    # -> duplicated, anything else (including non-API errors) -> unknown.
    for exc in exceptions:
        if isinstance(exc, CogniteAPIError):
            if exc.code in [400, 422] and exc.missing is not None:
                missing.extend(exc.missing)
                missing_exc = exc
            elif exc.code == 409 and exc.duplicated is not None:
                duplicated.extend(exc.duplicated)
                dup_exc = exc
            else:
                unknown_exc = exc
        else:
            unknown_exc = exc

    if unknown_exc:
        if isinstance(unknown_exc, CogniteAPIError) and (failed or unknown):
            raise CogniteAPIError(
                message=unknown_exc.message,
                code=unknown_exc.code,
                x_request_id=unknown_exc.x_request_id,
                missing=missing,
                duplicated=duplicated,
                successful=successful,
                failed=failed,
                unknown=unknown,
                unwrap_fn=unwrap_fn,
            )
        raise unknown_exc

    if missing_exc:
        raise CogniteNotFoundError(
            not_found=missing, successful=successful, failed=failed, unknown=unknown, unwrap_fn=unwrap_fn
        ) from missing_exc

    if dup_exc:
        raise CogniteDuplicatedError(
            duplicated=duplicated, successful=successful, failed=failed, unknown=unknown, unwrap_fn=unwrap_fn
        ) from dup_exc
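# A minimal usage sketch with made-up values: callers that fan requests out in parallel collect
# one CogniteAPIError per failed chunk and hand them all to collect_exc_info_and_raise, which
# merges them into a single typed exception.
exceptions = [
    CogniteAPIError("Resource not found", code=400, missing=[{"externalId": "ts-1"}]),
    CogniteAPIError("Resource duplicated", code=409, duplicated=[{"externalId": "ts-2"}]),
]
collect_exc_info_and_raise(exceptions, successful=[], failed=[], unknown=[])
# -> raises CogniteNotFoundError(not_found=[{"externalId": "ts-1"}], ...), since the "missing"
#    branch is checked before the "duplicated" one when both are present.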
def retrieve_dataset(client: CogniteClient, ext_id: str) -> DataSet:
    """
    Assuming internal IDs eventually will (read: should) die, we enforce the use of external IDs
    in this GitHub action... but since the SDK (currently 2.15.0) does not support data set
    external IDs for the FilesAPI, we need this lookup.
    """
    try:
        ds = client.data_sets.retrieve(external_id=ext_id)
        if ds:
            return ds
        raise ValueError(f"No dataset exists with external ID: '{ext_id}'")
    except CogniteAPIError as exc:
        err_msg = "Unable to retrieve dataset: Deployment key is missing capability 'dataset:READ'."
        logger.error(err_msg)
        raise CogniteAPIError(err_msg, exc.code, exc.x_request_id) from None
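# Putting the two action helpers together (illustrative; the zip path and external ID are
# made up): resolve the data set from its external ID, then upload the zipped code into it.
with open("function_code.zip", "rb") as f:
    zip_bytes = f.read()
data_set = retrieve_dataset(client, ext_id="my-functions-data-set")
file_meta = upload_zipped_code_to_files(client, zip_bytes, name="function_code.zip", ds=data_set)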
def run(self):
    unblocked_assets_lists = self._get_unblocked_assets()
    for unblocked_assets in unblocked_assets_lists:
        self.request_queue.put(list(unblocked_assets))

    while self.assets_remaining():
        res = self.response_queue.get()
        if isinstance(res, _AssetsFailedToPost):
            if isinstance(res.exc, CogniteAPIError):
                self.exception = res.exc
                for asset in res.assets:
                    # 5xx: the request may still have been applied server-side; 4xx: it was not.
                    if res.exc.code >= 500:
                        self.may_have_been_posted_assets.add(asset)
                    elif res.exc.code >= 400:
                        self.not_posted_assets.add(asset)
                    # Descendants of a failed asset can never be posted either.
                    for descendant in self._get_descendants(asset):
                        self.not_posted_assets.add(descendant)
            else:
                raise res.exc
        else:
            for asset in res:
                self.posted_assets.add(asset)
                self.successfully_posted_external_ids.add(asset.external_id)
            # Posting these assets may have unblocked children waiting on their parents.
            unblocked_assets_lists = self._get_unblocked_assets()
            for unblocked_assets in unblocked_assets_lists:
                self.request_queue.put(list(unblocked_assets))

    if len(self.may_have_been_posted_assets) > 0 or len(self.not_posted_assets) > 0:
        if isinstance(self.exception, CogniteAPIError):
            raise CogniteAPIError(
                message=self.exception.message,
                code=self.exception.code,
                x_request_id=self.exception.x_request_id,
                missing=self.exception.missing,
                duplicated=self.exception.duplicated,
                successful=AssetList(list(self.posted_assets)),
                unknown=AssetList(list(self.may_have_been_posted_assets)),
                failed=AssetList(list(self.not_posted_assets)),
                unwrap_fn=lambda a: a.external_id,
            )
        raise self.exception
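# Illustrative sketch (assumes a configured CogniteClient `client`, a list `assets` of Asset
# objects, and that posting the hierarchy goes through the poster above): the aggregate error
# lets the caller see which assets were created, which might have been, and which were not.
try:
    client.assets.create(assets)
except CogniteAPIError as exc:
    created = exc.successful        # AssetList of assets that were posted
    maybe_created = exc.unknown     # failed with 5xx, state unknown
    not_created = exc.failed        # failed with 4xx, plus their descendants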