def test_building_operation_url(self):
    """build_operation_url must normalize every base/path slash combination to one URL."""
    expected = "https://app.stage.neptune.ai/api/leaderboard/v1/attributes/download"
    bases = (
        "https://app.stage.neptune.ai",
        "https://app.stage.neptune.ai/",
        "app.stage.neptune.ai",
        "app.stage.neptune.ai/",
    )
    paths = (
        "api/leaderboard/v1/attributes/download",
        "/api/leaderboard/v1/attributes/download",
    )
    # Every combination should collapse into the single canonical URL.
    produced = {build_operation_url(base, path) for base in bases for path in paths}
    self.assertEqual({expected}, produced)
def create_http_client_with_auth(
    credentials: Credentials, ssl_verify: bool, proxies: Dict[str, str]
) -> Tuple[RequestsClient, ClientConfig]:
    """Build an authenticated HTTP client together with the resolved client config.

    The token client is pinned to the backend-reported swagger endpoint whenever
    the configured address differs from the address the backend reports.
    """
    config = get_client_config(credentials=credentials, ssl_verify=ssl_verify, proxies=proxies)
    verify_client_version(config, neptune_client_version)

    configured_api_url = credentials.api_url_opt or credentials.token_origin_address
    token_endpoint = None
    if configured_api_url != config.api_url:
        token_endpoint = build_operation_url(config.api_url, BACKEND_SWAGGER_PATH)

    client = create_http_client(ssl_verify=ssl_verify, proxies=proxies)
    client.authenticator = NeptuneAuthenticator(
        credentials.api_token,
        _get_token_client(
            credentials=credentials,
            ssl_verify=ssl_verify,
            proxies=proxies,
            endpoint_url=token_endpoint,
        ),
        ssl_verify,
        proxies,
    )
    return client, config
def download_image_series_element(
    swagger_client: SwaggerClient,
    container_id: str,
    attribute: str,
    index: int,
    destination: str,
):
    """Fetch one element of an image series and store it inside *destination*.

    The saved file is named after *index*, with the extension derived from the
    response's content type.
    """
    spec = swagger_client.swagger_spec
    endpoint = build_operation_url(
        spec.api_url,
        swagger_client.api.getImageSeriesValue.operation.path_name,
    )
    response = _download_raw_data(
        http_client=spec.http_client,
        url=endpoint,
        headers={},
        query_params={
            "experimentId": container_id,
            "attribute": attribute,
            "index": index,
        },
    )
    # e.g. "image/png" -> "png"
    extension = response.headers["content-type"].split("/")[-1]
    _store_response_as_file(
        response,
        os.path.join(destination, "{}.{}".format(index, extension)),
    )
def _build_multipart_urlset(
    swagger_client: SwaggerClient, target: FileUploadTarget
) -> MultipartUrlSet:
    """Resolve the operation handles and raw URLs used for multipart upload of *target*."""
    names = MULTIPART_URLS[target]
    api = swagger_client.api
    base_url = swagger_client.swagger_spec.api_url

    def raw_url(operation_name):
        # Chunk/single endpoints are called with plain HTTP, so a concrete URL is needed.
        return build_operation_url(base_url, getattr(api, operation_name).operation.path_name)

    return MultipartUrlSet(
        start_chunked=getattr(api, names.start_chunked),
        finish_chunked=getattr(api, names.finish_chunked),
        send_chunk=raw_url(names.send_chunk),
        single=raw_url(names.single),
    )
def websockets_factory(
    self, project_id: str, run_id: str
) -> Optional[WebsocketsFactory]:
    """Create a websockets factory bound to the given run's notification channel."""
    # Swap the http(s) scheme for ws(s), keeping the rest of the API URL intact.
    ws_base = re.sub(r"^http", "ws", self._client_config.api_url)
    signal_url = build_operation_url(
        ws_base, f"/api/notifications/v1/runs/{project_id}/{run_id}/signal"
    )
    return WebsocketsFactory(
        url=signal_url,
        session=self._http_client.authenticator.auth.session,
        proxies=self.proxies,
    )
def upload_file_attribute(
    swagger_client: SwaggerClient,
    container_id: str,
    attribute: str,
    source: Union[str, bytes],
    ext: str,
    multipart_config: Optional[MultipartConfig],
) -> List[NeptuneException]:
    """Upload a single file attribute, either via the legacy chunked endpoint or multipart.

    Args:
        swagger_client: swagger client exposing the upload operations.
        container_id: id of the experiment/run that owns the attribute.
        attribute: attribute path the file is stored under.
        source: a filesystem path or the raw bytes to upload.
        ext: file extension appended to the target name (may be empty).
        multipart_config: when None, the legacy chunked upload procedure is used.

    Returns:
        A list of upload errors; empty on success.
    """
    if isinstance(source, str) and not os.path.isfile(source):
        # Fixed message wording (was: "is a not a file").
        return [FileUploadError(source, "Path not found or is not a file.")]
    target = attribute
    if ext:
        target += "." + ext
    try:
        upload_entry = UploadEntry(
            source if isinstance(source, str) else BytesIO(source), target
        )
        if multipart_config is None:
            # the legacy upload procedure
            url = build_operation_url(
                swagger_client.swagger_spec.api_url,
                swagger_client.api.uploadAttribute.operation.path_name,
            )
            upload_configuration = DEFAULT_UPLOAD_CONFIG
            _upload_loop(
                file_chunk_stream=FileChunkStream(upload_entry, upload_configuration),
                http_client=swagger_client.swagger_spec.http_client,
                url=url,
                query_params={
                    "experimentId": container_id,
                    "attribute": attribute,
                    "ext": ext,
                },
            )
        else:
            _multichunk_upload(
                upload_entry,
                query_params={
                    "experimentIdentifier": container_id,
                    "attribute": attribute,
                    "ext": ext,
                },
                swagger_client=swagger_client,
                multipart_config=multipart_config,
                target=FileUploadTarget.FILE_ATOM,
            )
    except MetadataInconsistency as e:
        return [e]
    # Previously the success path fell through and returned None, contradicting
    # the declared return type; an empty list keeps callers' truthiness checks working.
    return []
def _get_token_client(
    credentials: Credentials,
    ssl_verify: bool,
    proxies: Dict[str, str],
    endpoint_url: Optional[str] = None,  # was implicit-Optional `str = None` (PEP 484)
) -> SwaggerClient:
    """Create the swagger client used to exchange the API token.

    Args:
        credentials: holds the api token and the configured/parsed API addresses.
        ssl_verify: whether to verify TLS certificates.
        proxies: optional proxy configuration passed to the HTTP client.
        endpoint_url: explicit swagger endpoint; when None, the URL is derived
            from the credentials' configured address.
    """
    config_api_url = credentials.api_url_opt or credentials.token_origin_address
    # Only check DNS resolution when no proxy is configured — a proxy may
    # resolve the host on our behalf.
    if proxies is None:
        verify_host_resolution(config_api_url)
    token_http_client = create_http_client(ssl_verify, proxies)
    return create_swagger_client(
        build_operation_url(endpoint_url or config_api_url, BACKEND_SWAGGER_PATH),
        token_http_client,
    )
def download_file_attribute(
    swagger_client: SwaggerClient,
    container_id: str,
    attribute: str,
    destination: Optional[str] = None,
):
    """Download a file attribute's content and persist it to *destination*."""
    spec = swagger_client.swagger_spec
    download_url = build_operation_url(
        spec.api_url,
        swagger_client.api.downloadAttribute.operation.path_name,
    )
    raw_response = _download_raw_data(
        http_client=spec.http_client,
        url=download_url,
        headers={"Accept": "application/octet-stream"},
        query_params={"experimentId": container_id, "attribute": attribute},
    )
    _store_response_as_file(raw_response, destination)
def create_artifacts_client(client_config: ClientConfig, http_client: HttpClient) -> SwaggerClient:
    """Build a swagger client for the artifacts API."""
    swagger_url = build_operation_url(client_config.api_url, ARTIFACTS_SWAGGER_PATH)
    return create_swagger_client(swagger_url, http_client)
def create_leaderboard_client(client_config: ClientConfig, http_client: HttpClient) -> SwaggerClient:
    """Build a swagger client for the leaderboard API."""
    swagger_url = build_operation_url(client_config.api_url, LEADERBOARD_SWAGGER_PATH)
    return create_swagger_client(swagger_url, http_client)
def create_backend_client(client_config: ClientConfig, http_client: HttpClient) -> SwaggerClient:
    """Build a swagger client for the backend API."""
    swagger_url = build_operation_url(client_config.api_url, BACKEND_SWAGGER_PATH)
    return create_swagger_client(swagger_url, http_client)
def upload_file_set_attribute(
    swagger_client: SwaggerClient,
    container_id: str,
    attribute: str,
    file_globs: Iterable[str],
    reset: bool,
    multipart_config: Optional[MultipartConfig],
) -> List[NeptuneException]:
    """Upload a set of files (matched by *file_globs*) into a file-set attribute.

    Packages are uploaded one by one: multi-entry packages (and empty-dir /
    empty packages) go through the tar.gz endpoint; a single regular file goes
    through either the legacy chunked endpoint or the multipart procedure,
    depending on *multipart_config*.

    Args:
        swagger_client: swagger client exposing the upload operations.
        container_id: id of the experiment/run that owns the attribute.
        attribute: attribute path of the file set.
        file_globs: glob patterns selecting the files to upload.
        reset: when True, the server-side file set is replaced instead of extended.
        multipart_config: when None, the legacy chunked upload procedure is used.

    Returns:
        A list of upload errors; empty on success.
    """
    unique_upload_entries = get_unique_upload_entries(file_globs)
    try:
        upload_configuration = DEFAULT_UPLOAD_CONFIG
        for package in split_upload_files(
            upload_entries=unique_upload_entries,
            upload_configuration=upload_configuration,
        ):
            # An empty package only matters when it carries the reset request.
            if package.is_empty() and not reset:
                continue
            uploading_multiple_entries = package.len > 1
            creating_a_single_empty_dir = (
                package.len == 1
                and not package.items[0].is_stream()
                and os.path.isdir(package.items[0].source_path)
            )
            if (
                uploading_multiple_entries
                or creating_a_single_empty_dir
                or package.is_empty()
            ):
                # Bundle the whole package into an in-memory tar.gz and push it
                # through the tar endpoint in one request.
                data = compress_to_tar_gz_in_memory(upload_entries=package.items)
                url = build_operation_url(
                    swagger_client.swagger_spec.api_url,
                    swagger_client.api.uploadFileSetAttributeTar.operation.path_name,
                )
                result = upload_raw_data(
                    http_client=swagger_client.swagger_spec.http_client,
                    url=url,
                    data=data,
                    headers={"Content-Type": "application/octet-stream"},
                    query_params={
                        "experimentId": container_id,
                        "attribute": attribute,
                        "reset": str(reset),
                    },
                )
                _attribute_upload_response_handler(result)
            else:
                upload_entry = package.items[0]
                if multipart_config is None:
                    # the legacy upload procedure
                    url = build_operation_url(
                        swagger_client.swagger_spec.api_url,
                        swagger_client.api.uploadFileSetAttributeChunk.operation.path_name,
                    )
                    file_chunk_stream = FileChunkStream(
                        upload_entry=upload_entry,
                        upload_configuration=upload_configuration,
                    )
                    _upload_loop(
                        file_chunk_stream=file_chunk_stream,
                        http_client=swagger_client.swagger_spec.http_client,
                        url=url,
                        query_params={
                            "experimentId": container_id,
                            "attribute": attribute,
                            "reset": str(reset),
                            "path": upload_entry.target_path,
                        },
                    )
                else:
                    _multichunk_upload(
                        upload_entry,
                        query_params={
                            "experimentIdentifier": container_id,
                            "attribute": attribute,
                            "subPath": upload_entry.target_path,
                        },
                        swagger_client=swagger_client,
                        multipart_config=multipart_config,
                        target=FileUploadTarget.FILE_SET,
                    )
            # Only the first package may reset the file set; subsequent ones append.
            reset = False
    except MetadataInconsistency as e:
        # A single-cause error is returned as-is; a multi-cause one is unpacked
        # into one MetadataInconsistency per description.
        if len(e.args) == 1:
            return [e]
        else:
            return [MetadataInconsistency(desc) for desc in e.args]
    # Previously the success path fell through and returned None, contradicting
    # the declared return type; an empty list keeps callers' truthiness checks working.
    return []