def test_cannot_resolve_host(self, gethostname_mock, _):
    """Backend construction surfaces DNS failures as CannotResolveHostname."""
    # given: hostname resolution fails with a DNS error
    gethostname_mock.side_effect = socket.gaierror

    # expect: constructing the backend translates it into CannotResolveHostname
    self.assertRaises(CannotResolveHostname, HostedNeptuneBackend, credentials)
def test_max_compatible_version_fail(self, swagger_client_factory):
    """Construction fails with a helpful message when the client is too new for the server."""
    # given: server declares it supports clients only up to version 0.4.999
    self._get_swagger_client_mock(swagger_client_factory, max_compatible="0.4.999")

    # expect: constructing the backend raises UnsupportedClientVersion
    with self.assertRaises(UnsupportedClientVersion) as ex:
        HostedNeptuneBackend(credentials)
    # The message check must live AFTER the with-block: a statement placed after
    # the raising call inside the block would never execute. assertIn also gives
    # a clearer failure message than assertTrue("..." in str(...)).
    self.assertIn("Please install neptune-client==0.4.0", str(ex.exception))
def test_limit_exceed_legacy(self, swagger_client_factory):
    """A 422 response with the legacy limit title raises NeptuneLimitExceedException."""
    # given: a backend whose executeOperations call always fails with HTTP 422
    swagger_client = self._get_swagger_client_mock(swagger_client_factory)
    backend = HostedNeptuneBackend(credentials)
    run_id = str(uuid.uuid4())

    limit_response = MagicMock()
    limit_response.json.return_value = {"title": "Monitoring hours not left"}
    swagger_client.api.executeOperations.side_effect = HTTPUnprocessableEntity(
        response=limit_response
    )

    # then: every container type maps the 422 onto NeptuneLimitExceedException
    for container_type in self.container_types:
        with self.subTest(msg=f"For type {container_type.value}"), self.assertRaises(
            NeptuneLimitExceedException
        ):
            backend.execute_operations(
                container_id=run_id,
                container_type=container_type,
                operations=[LogFloats(["float1"], [LogFloats.ValueType(1, 2, 3)])],
            )
def sync(path: Path, runs_names: List[str], project_name: Optional[str]): """Synchronizes runs with unsent data with the server. Neptune stores run data on disk in '.neptune' directories. In case a run executes offline or network is unavailable as the run executes, run data can be synchronized with the server with this command line utility. You can list unsynchronized runs with `neptune status` Examples: \b # Synchronize all runs in the current directory neptune sync \b # Synchronize all runs in the given path neptune sync --path foo/bar \b # Synchronize only runs "NPT-42" and "NPT-43" in "workspace/project" in the current directory neptune sync --run workspace/project/NPT-42 --run workspace/project/NPT-43 \b # Synchronise all runs in the current directory, sending offline runs to project "workspace/project" neptune sync --project workspace/project \b # Synchronize only the offline run with UUID offline/a1561719-b425-4000-a65a-b5efb044d6bb # to project "workspace/project" neptune sync --project workspace/project --run offline/a1561719-b425-4000-a65a-b5efb044d6bb """ # pylint: disable=global-statement global backend backend = HostedNeptuneBackend(Credentials.from_token()) if runs_names: sync_selected_runs(path, project_name, runs_names) else: sync_all_runs(path, project_name)
def status(path: Path) -> None: """List synchronized and unsynchronized runs in the given directory. Trashed runs are not listed. Neptune stores run data on disk in '.neptune' directories. In case a run executes offline or network is unavailable as the run executes, run data can be synchronized with the server with this command line utility. Examples: \b # List synchronized and unsynchronized runs in the current directory neptune status \b # List synchronized and unsynchronized runs in directory "foo/bar" without actually syncing neptune status --path foo/bar """ # pylint: disable=global-statement global backend backend = HostedNeptuneBackend(Credentials.from_token()) synchronization_status(path)
def test_execute_operations(self, upload_mock, swagger_client_factory):
    """execute_operations splits file uploads from API operations, forwards each
    correctly, and aggregates errors from both paths into its result."""
    # given: a backend whose executeOperations reports one server-side error
    swagger_client = self._get_swagger_client_mock(swagger_client_factory)
    backend = HostedNeptuneBackend(credentials)
    container_uuid = str(uuid.uuid4())

    response_error = MagicMock()
    response_error.errorDescription = "error1"
    # Prime the mock's return chain, then reset so the call below is the only one recorded.
    swagger_client.api.executeOperations().response().result = [response_error]
    swagger_client.api.executeOperations.reset_mock()
    # Every upload reports the same single FileUploadError.
    upload_mock.return_value = [FileUploadError("file1", "error2")]
    some_text = "Some streamed text"
    some_binary = b"Some streamed binary"

    for container_type in self.container_types:
        with self.subTest(msg=f"For type {container_type.value}"):
            # Fresh mock state per container type so call counts don't leak across subtests.
            upload_mock.reset_mock()
            swagger_client_factory.reset_mock()

            # when: a mix of file uploads, a float log, and a string assignment
            result = backend.execute_operations(
                container_id=container_uuid,
                container_type=container_type,
                operations=[
                    UploadFile(
                        path=["some", "files", "some_file"],
                        ext="",
                        file_path="path_to_file",
                    ),
                    UploadFileContent(
                        path=["some", "files", "some_text_stream"],
                        ext="txt",
                        file_content=base64_encode(some_text.encode("utf-8")),
                    ),
                    UploadFileContent(
                        path=["some", "files", "some_binary_stream"],
                        ext="bin",
                        file_content=base64_encode(some_binary),
                    ),
                    LogFloats(["images", "img1"], [LogFloats.ValueType(1, 2, 3)]),
                    AssignString(["properties", "name"], "some text"),
                    UploadFile(
                        path=["some", "other", "file.txt"],
                        ext="txt",
                        file_path="other/file/path.txt",
                    ),
                ],
            )

            # then: only the non-file operations go through the swagger API,
            # with paths '/'-joined and timestamps converted to milliseconds
            swagger_client.api.executeOperations.assert_called_once_with(
                **{
                    "experimentId": str(container_uuid),
                    "operations": [
                        {
                            "path": "images/img1",
                            "logFloats": {
                                "entries": [
                                    {
                                        "value": 1,
                                        "step": 2,
                                        "timestampMilliseconds": 3000,
                                    }
                                ]
                            },
                        },
                        {
                            "path": "properties/name",
                            "assignString": {"value": "some text"},
                        },
                    ],
                    **DEFAULT_REQUEST_KWARGS,
                }
            )
            # File operations are routed to the uploader; content uploads pass
            # decoded bytes as `source`, path uploads pass the filesystem path.
            upload_mock.assert_has_calls(
                [
                    call(
                        swagger_client=backend.leaderboard_client,
                        container_id=container_uuid,
                        attribute="some/other/file.txt",
                        source="other/file/path.txt",
                        ext="txt",
                        multipart_config=backend._client_config.multipart_config,
                    ),
                    call(
                        swagger_client=backend.leaderboard_client,
                        container_id=container_uuid,
                        attribute="some/files/some_file",
                        source="path_to_file",
                        ext="",
                        multipart_config=backend._client_config.multipart_config,
                    ),
                    call(
                        swagger_client=backend.leaderboard_client,
                        container_id=container_uuid,
                        attribute="some/files/some_text_stream",
                        source=some_text.encode("utf-8"),
                        ext="txt",
                        multipart_config=backend._client_config.multipart_config,
                    ),
                    call(
                        swagger_client=backend.leaderboard_client,
                        container_id=container_uuid,
                        attribute="some/files/some_binary_stream",
                        source=some_binary,
                        ext="bin",
                        multipart_config=backend._client_config.multipart_config,
                    ),
                ],
                any_order=True,
            )
            # Result is (operations processed, collected errors): one upload
            # error per file operation plus the server-side inconsistency.
            self.assertEqual(
                (
                    6,
                    [
                        FileUploadError("file1", "error2"),
                        FileUploadError("file1", "error2"),
                        FileUploadError("file1", "error2"),
                        FileUploadError("file1", "error2"),
                        MetadataInconsistency("error1"),
                    ],
                ),
                result,
            )
def test_max_compatible_version_ok(self, swagger_client_factory):
    """Backend construction succeeds when the client version is within the server's range."""
    # given: server accepts clients up to version 0.5.12
    self._get_swagger_client_mock(swagger_client_factory, max_compatible="0.5.12")

    # expect: construction completes without raising
    HostedNeptuneBackend(credentials)
def test_track_to_existing_artifact(
    self, track_to_existing_artifact_mock, swagger_client_factory
):
    """TrackFilesToArtifact operations are forwarded to track_to_existing_artifact
    with the resolved artifact hash, for every container type."""
    # given: backend whose getArtifactAttribute resolves to a known hash
    swagger_client = self._get_swagger_client_mock(swagger_client_factory)
    backend = HostedNeptuneBackend(credentials)
    container_id = str(uuid.uuid4())
    project_id = str(uuid.uuid4())

    response_error = MagicMock()
    response_error.errorDescription = "error1"
    swagger_client.api.executeOperations.return_value.response.return_value.result = [
        response_error
    ]
    swagger_client.api.getArtifactAttribute.return_value.response.return_value.result.hash = (
        "dummyHash"
    )

    for container_type in self.container_types:
        with self.subTest(msg=f"For type {container_type.value}"):
            # Fix: reset mocks exactly once per iteration, inside the subTest.
            # The previous version also reset them before entering the subTest,
            # performing the same resets twice per loop pass.
            track_to_existing_artifact_mock.reset_mock()
            swagger_client_factory.reset_mock()

            # when: two operations target the same path ["sub", "three"] —
            # they should be merged into a single tracking call
            backend.execute_operations(
                container_id=container_id,
                container_type=container_type,
                operations=[
                    TrackFilesToArtifact(
                        path=["sub", "one"],
                        project_id=project_id,
                        entries=[("/path/to/file", "/path/to")],
                    ),
                    TrackFilesToArtifact(
                        path=["sub", "two"],
                        project_id=project_id,
                        entries=[
                            ("/path/to/file1", None),
                            ("/path/to/file2", None),
                        ],
                    ),
                    TrackFilesToArtifact(
                        path=["sub", "three"],
                        project_id=project_id,
                        entries=[("/path/to/file1", None)],
                    ),
                    TrackFilesToArtifact(
                        path=["sub", "three"],
                        project_id=project_id,
                        entries=[("/path/to/file2", None)],
                    ),
                ],
            )

            # then: one call per distinct path, entries merged for ["sub", "three"]
            track_to_existing_artifact_mock.assert_has_calls(
                [
                    call(
                        swagger_client=swagger_client,
                        project_id=project_id,
                        path=["sub", "one"],
                        artifact_hash="dummyHash",
                        parent_identifier=str(container_id),
                        entries=[("/path/to/file", "/path/to")],
                        default_request_params=DEFAULT_REQUEST_KWARGS,
                    ),
                    call(
                        swagger_client=swagger_client,
                        project_id=project_id,
                        path=["sub", "two"],
                        artifact_hash="dummyHash",
                        parent_identifier=str(container_id),
                        entries=[
                            ("/path/to/file1", None),
                            ("/path/to/file2", None),
                        ],
                        default_request_params=DEFAULT_REQUEST_KWARGS,
                    ),
                    call(
                        swagger_client=swagger_client,
                        project_id=project_id,
                        path=["sub", "three"],
                        artifact_hash="dummyHash",
                        parent_identifier=str(container_id),
                        entries=[
                            ("/path/to/file1", None),
                            ("/path/to/file2", None),
                        ],
                        default_request_params=DEFAULT_REQUEST_KWARGS,
                    ),
                ],
                any_order=True,
            )
def test_upload_files_destination_path(self, upload_mock, swagger_client_factory):
    """Each UploadFile is forwarded to the uploader with its '/'-joined
    attribute path, original source path, and extension, per container type."""
    # given
    self._get_swagger_client_mock(swagger_client_factory)
    backend = HostedNeptuneBackend(credentials)
    run_id = str(uuid.uuid4())

    # (attribute path, extension, source file) triples exercised below
    cases = [
        (["some", "path", "1", "var"], "", "/path/to/file"),
        (["some", "path", "2", "var"], "txt", "/some.file/with.dots.txt"),
        (["some", "path", "3", "var"], "jpeg", "/path/to/some_image.jpeg"),
    ]

    for container_type in self.container_types:
        with self.subTest(msg=f"For type {container_type.value}"):
            # fresh mock state per subtest
            upload_mock.reset_mock()
            swagger_client_factory.reset_mock()

            # when
            backend.execute_operations(
                container_id=run_id,
                container_type=container_type,
                operations=[
                    UploadFile(path=p, ext=e, file_path=f) for p, e, f in cases
                ],
            )

            # then: every case produced one matching uploader call
            upload_mock.assert_has_calls(
                [
                    call(
                        swagger_client=backend.leaderboard_client,
                        container_id=run_id,
                        attribute="/".join(p),
                        source=f,
                        ext=e,
                        multipart_config=backend._client_config.multipart_config,
                    )
                    for p, e, f in cases
                ],
                any_order=True,
            )