def __init__(self, root, project_name, experiment_name, params, hyperparams):
    super(NeptuneLogger, self).__init__(root, project_name, experiment_name, params, hyperparams)
    import neptune.new as neptune
    from neptune.new.types import File

    self._File = File
    neptune_params = params["neptune"]
    workspace = neptune_params["workspace"]
    project = neptune_params["project"]
    source_files = neptune_params["source_files"]
    run_id = neptune_params.get("id", False)
    if run_id:
        self.run = neptune.init(project=f"{workspace}/{project}", run=run_id, source_files=source_files)
    else:
        self.run = neptune.init(project=f"{workspace}/{project}", source_files=source_files)
    self.run["sys/tags"].add(neptune_params["tags"])
    self.run["parameters"] = params
    self.run["hyperparameters"] = hyperparams
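# A hypothetical example of the params["neptune"] section the constructor above reads;
# all workspace/project/tag values here are placeholders, not taken from the original code:
params = {
    "neptune": {
        "workspace": "my-workspace",
        "project": "my-project",
        "source_files": ["*.py"],
        "tags": ["baseline"],
        # "id": "RUN-123",  # optional: pass an existing run id to resume that run
    }
}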
def test_offline_sync(self, environment):
    with tmp_context() as tmp:
        # create run in offline mode
        run = neptune.init(
            mode="offline",
            project=environment.project,
            **DISABLE_SYSLOG_KWARGS,
        )
        # assign some values
        key = self.gen_key()
        val = fake.word()
        run[key] = val
        # and stop it
        run.stop()

        # run the sync CLI command
        result = runner.invoke(sync, ["--path", tmp, "-p", environment.project])
        assert result.exit_code == 0

        # offline mode doesn't support custom_run_id, so parse the sync output to determine the short id
        sys_id_found = re.search(self.SYNCHRONIZED_SYSID_RE, result.stdout)
        assert len(sys_id_found.groups()) == 1
        sys_id = sys_id_found.group(1)

        run2 = neptune.init(run=sys_id, project=environment.project)
        assert run2[key].fetch() == val
def test_entrypoint_without_common_root(self):
    exp = init(mode="debug", source_files=["../*"])
    self.assertEqual(exp["source_code/entrypoint"].fetch(), "/home/user/main_dir/main.py")

    exp = init(mode="debug", source_files=["internal/*"])
    self.assertEqual(exp["source_code/entrypoint"].fetch(), "/home/user/main_dir/main.py")
def test_resuming_run(self, environment):
    exp = neptune.init(project=environment.project)

    key = self.gen_key()
    val = fake.word()
    exp[key] = val
    exp.sync()
    exp.stop()

    # pylint: disable=protected-access
    exp2 = neptune.init(run=exp._short_id, project=environment.project)
    assert exp2[key].fetch() == val
def test_custom_run_id(self, environment):
    custom_run_id = "-".join(fake.word() for _ in range(3))

    run = neptune.init(custom_run_id=custom_run_id, project=environment.project)
    key = self.gen_key()
    val = fake.word()
    run[key] = val
    run.sync()
    run.stop()

    exp2 = neptune.init(custom_run_id=custom_run_id, project=environment.project)
    assert exp2[key].fetch() == val
def __init__(
    self,
    base_namespace=None,
    api_token=None,
    project=None,
    run=None,
    log_batch_metrics: bool = SETTINGS.log_batch_metrics,
    log_epoch_metrics: bool = SETTINGS.log_epoch_metrics,
    **neptune_run_kwargs,
):
    super().__init__(
        log_batch_metrics=log_batch_metrics, log_epoch_metrics=log_epoch_metrics
    )
    if base_namespace is None:
        self.base_namespace = "experiment"
    else:
        self.base_namespace = base_namespace
    self._api_token = api_token
    self._project = project
    self._neptune_run_kwargs = neptune_run_kwargs
    if run is None:
        self.run = neptune.init(
            project=self._project,
            api_token=self._api_token,
            **self._neptune_run_kwargs,
        )
    else:
        self.run = run
    try:
        # log the catalyst version string under the integration namespace
        from catalyst import __version__ as version

        self.run["source_code/integrations/neptune-catalyst"] = version
    except (ImportError, NameError, AttributeError):
        pass
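# A minimal usage sketch for the logger above, assuming the class is exposed as NeptuneLogger;
# the project path is a placeholder, and api_token=None falls back to the NEPTUNE_API_TOKEN
# environment variable:
logger = NeptuneLogger(
    base_namespace="experiment",
    project="my-workspace/my-project",
    api_token=None,
)
logger.run["experiment/batch_loss"].log(0.42)
logger.run.stop()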
def test_assign_dict(self):
    exp = init(mode="debug", flush_period=0.5)
    now = datetime.now()
    exp.assign(
        {
            "x": 5,
            "metadata": {"name": "Trol", "age": 376},
            "toys": StringSeries(["cudgel", "hat"]),
            "nested": {"nested": {"deep_secret": FloatSeries([13, 15])}},
            "simple_types": {
                "int": 42,
                "str": "imagine",
                "float": 3.14,
                "datetime": now,
                "list": list(range(10)),
            },
        }
    )
    self.assertEqual(exp["x"].fetch(), 5)
    self.assertEqual(exp["metadata/name"].fetch(), "Trol")
    self.assertEqual(exp["metadata/age"].fetch(), 376)
    self.assertEqual(exp["toys"].fetch_last(), "hat")
    self.assertEqual(exp["nested/nested/deep_secret"].fetch_last(), 15)
    self.assertEqual(exp["simple_types/int"].fetch(), 42)
    self.assertEqual(exp["simple_types/str"].fetch(), "imagine")
    self.assertEqual(exp["simple_types/float"].fetch(), 3.14)
    self.assertEqual(
        exp["simple_types/datetime"].fetch(),
        now.replace(microsecond=1000 * int(now.microsecond / 1000)),
    )
    self.assertEqual(exp["simple_types/list"].fetch(), str(list(range(10))))
def test_pop_namespace(self):
    exp = init(mode="debug", flush_period=0.5)
    exp.define("some/path/subpath/num", Float(3))
    exp.define("some/path/text", String("Some text"))
    exp.define("some/otherpath", Float(4))
    exp.pop("some/path")
    self.assertTrue("path" not in exp.get_structure()["some"])
def test_pop(self):
    exp = init(mode="debug", flush_period=0.5)
    exp.define("some/path/num", Float(3))
    exp.define("some/path/text", String("Some text"))
    exp.pop("some/path/text")
    self.assertTrue("num" in exp.get_structure()["some"]["path"])
    self.assertTrue("text" not in exp.get_structure()["some"]["path"])
def test_assign_copy_to_existing(self):
    exp = init(mode="debug")
    exp["some/path/num"] = 42
    exp["copied/path"] = 54
    exp["copied/path"] = exp["some/path/num"]
    exp.wait()
    self.assertEqual(42, exp["copied/path"].fetch())
def test_entrypoint_in_interactive_python(self):
    exp = init(mode="debug")
    with self.assertRaises(AttributeError):
        exp["source_code/entrypoint"].get()

    exp = init(mode="debug", source_files=[])
    with self.assertRaises(AttributeError):
        exp["source_code/entrypoint"].get()

    exp = init(mode="debug", source_files=["../*"])
    with self.assertRaises(AttributeError):
        exp["source_code/entrypoint"].get()

    exp = init(mode="debug", source_files=["internal/*"])
    with self.assertRaises(AttributeError):
        exp["source_code/entrypoint"].get()
def __init__(self, base_namespace=None, api_token=None, project=None, run=None, **neptune_run_kwargs):
    if base_namespace is None:
        self.base_namespace = "experiment"
    else:
        self.base_namespace = base_namespace
    self._api_token = api_token
    self._project = project
    self._neptune_run_kwargs = neptune_run_kwargs
    if run is None:
        self.run = neptune.init(project=self._project, api_token=self._api_token, **self._neptune_run_kwargs)
    else:
        self.run = run
    try:
        # log the catalyst version string under the integration namespace
        from catalyst import __version__ as version

        self.run["source_code/integrations/neptune-catalyst"] = version
    except (ImportError, NameError, AttributeError):
        pass
def test_multiple_runs_single(self, container: neptune.Run, environment):
    # pylint: disable=protected-access,undefined-loop-variable
    number_of_reinitialized = 5
    namespace = fake.unique.word()

    reinitialized_runs = [
        neptune.init(run=container._short_id, project=environment.project)
        for _ in range(number_of_reinitialized)
    ]

    container[f"{namespace}/{fake.unique.word()}"] = fake.color()
    container.sync()

    random.shuffle(reinitialized_runs)
    for run in reinitialized_runs:
        run[f"{namespace}/{fake.unique.word()}"] = fake.color()

    random.shuffle(reinitialized_runs)
    for run in reinitialized_runs:
        run.sync()

    container.sync()

    assert len(container[namespace].fetch()) == number_of_reinitialized + 1
def test_pop(self):
    exp = init(mode="debug", flush_period=0.5)
    exp["some/num/val"].assign(3, wait=True)
    self.assertIn("some", exp.get_structure())
    ns = exp["some"]
    ns.pop("num/val", wait=True)
    self.assertNotIn("some", exp.get_structure())
def test_log_value_errors(self):
    exp = init(mode="debug", flush_period=0.5)
    img = FileVal.as_image(PIL.Image.new("RGB", (60, 30), color="red"))

    with self.assertRaises(ValueError):
        exp["x"].log([])
    with self.assertRaises(ValueError):
        exp["x"].log([5, "str"])
    with self.assertRaises(ValueError):
        exp["x"].log([5, 10], step=10)

    exp["some/num/val"].log([5], step=1)
    exp["some/num/val"].log([])
    with self.assertRaises(ValueError):
        exp["some/num/val"].log("str")
    with self.assertRaises(TypeError):
        exp["some/num/val"].log(img)

    exp["some/str/val"].log(["str"], step=1)
    exp["some/str/val"].log([])

    exp["some/img/val"].log([img], step=1)
    exp["some/img/val"].log([])
    with self.assertRaises(TypeError):
        exp["some/img/val"].log(5)
    with self.assertRaises(FileNotFound):
        exp["some/img/val"].log("path")

    self.assertEqual(exp["some"]["num"]["val"].fetch_last(), 5)
    self.assertEqual(exp["some"]["str"]["val"].fetch_last(), "str")
    self.assertIsInstance(exp.get_structure()["some"]["img"]["val"], FileSeries)
def test_assign_namespace(self):
    exp = init(mode="debug", flush_period=0.5)
    exp["some/namespace"].assign(
        NamespaceVal(
            {
                "sub-namespace/val1": 1.0,
                "sub-namespace/val2": StringSetVal(["tag1", "tag2"]),
            }
        )
    )
    self.assertEqual(exp["some/namespace/sub-namespace/val1"].fetch(), 1.0)
    self.assertEqual(exp["some/namespace/sub-namespace/val2"].fetch(), {"tag1", "tag2"})
    self.assertIsInstance(exp.get_structure()["some"]["namespace"]["sub-namespace"]["val1"], Float)
    self.assertIsInstance(exp.get_structure()["some"]["namespace"]["sub-namespace"]["val2"], StringSet)

    exp["some"].assign(NamespaceVal({"namespace/sub-namespace/val1": 2.0}))
    self.assertEqual(exp["some/namespace/sub-namespace/val1"].fetch(), 2.0)
    self.assertEqual(exp["some/namespace/sub-namespace/val2"].fetch(), {"tag1", "tag2"})
    self.assertIsInstance(exp.get_structure()["some"]["namespace"]["sub-namespace"]["val1"], Float)
    self.assertIsInstance(exp.get_structure()["some"]["namespace"]["sub-namespace"]["val2"], StringSet)

    with self.assertRaises(TypeError):
        exp["some"].assign(NamespaceVal({"namespace/sub-namespace/val1": {"tagA", "tagB"}}))
def _init_run_instance(self, api_key, project, name, run, neptune_run_kwargs) -> Run:
    if run is not None:
        run_instance = run
    else:
        try:
            run_instance = neptune.init(
                project=project,
                api_token=api_key,
                name=name,
                **neptune_run_kwargs,
            )
        except NeptuneLegacyProjectException as e:
            raise TypeError(
                f"""Project {project} has not been migrated to the new structure.
You can still integrate it with the Neptune logger using the legacy Python API
available as part of the neptune-contrib package:
 - https://docs-legacy.neptune.ai/integrations/pytorch_lightning.html\n"""
            ) from e

    # make sure we log the integration version for both newly created and externally passed `Run` instances
    run_instance[_INTEGRATION_VERSION_KEY] = __version__

    # keep api_key and project; they will be required when resuming a Run for a pickled logger
    self._api_key = api_key
    self._project_name = run_instance._project_name  # skipcq: PYL-W0212

    return run_instance
def test_sync_run(self, environment):
    custom_run_id = "-".join(fake.word() for _ in range(3))

    with tmp_context() as tmp:
        # with test values
        key = self.gen_key()
        original_value = fake.word()
        updated_value = fake.word()

        # init run
        run = neptune.init(
            custom_run_id=custom_run_id,
            project=environment.project,
            **DISABLE_SYSLOG_KWARGS,
        )

        def get_next_run():
            return neptune.init(
                custom_run_id=custom_run_id,
                project=environment.project,
                **DISABLE_SYSLOG_KWARGS,
            )

        self._test_sync(
            exp=run,
            get_next_exp=get_next_run,
            path=tmp,
            key=key,
            original_value=original_value,
            updated_value=updated_value,
        )
def on_experiment_start(self):
    """Start of experiment."""
    self.experiment = neptune.init(
        project=self.project_dir,
        api_token=self.api_token,
        tags=self.tags,
        name=self.experiment_name,
    )
    self.experiment["params"] = self.params
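# A hypothetical companion hook (not part of the original code) sketching how the run created
# in on_experiment_start could be flushed and closed when the experiment finishes:
def on_experiment_end(self):
    """End of experiment."""
    if self.experiment is not None:
        self.experiment.sync()
        self.experiment.stop()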
def test_representation(self):
    exp = init(mode="debug", flush_period=0.5)
    exp["params/int"] = 1
    exp["params/float"] = 3.14
    exp["params/bool"] = True
    exp["params/datetime"] = datetime.now()
    exp["params/sub-namespace/int"] = 42
    exp["params/sub-namespace/string"] = "Some text"

    self.assertEqual('<Namespace field at "params">', repr(exp["params"]))
    self.assertEqual('<Integer field at "params/int">', repr(exp["params/int"]))
    self.assertEqual('<Float field at "params/float">', repr(exp["params/float"]))
    self.assertEqual('<Boolean field at "params/bool">', repr(exp["params/bool"]))
    self.assertEqual('<Datetime field at "params/datetime">', repr(exp["params/datetime"]))
    self.assertEqual('<Unassigned field at "params/unassigned">', repr(exp["params/unassigned"]))

    sub_namespace = exp["params/sub-namespace"]
    self.assertEqual('<Integer field at "params/sub-namespace/int">', repr(sub_namespace["int"]))
    self.assertEqual(
        '<String field at "params/sub-namespace/string">',
        repr(sub_namespace["string"]),
    )
    self.assertEqual(
        '<Unassigned field at "params/sub-namespace/unassigned">',
        repr(sub_namespace["unassigned"]),
    )
def test_sync_mode(self):
    exp = init(mode="sync")
    exp["some/variable"] = 13
    exp["copied/variable"] = exp["some/variable"]
    self.assertEqual(13, exp["some/variable"].fetch())
    self.assertEqual(13, exp["copied/variable"].fetch())
    self.assertNotIn(str(exp._id), os.listdir(".neptune"))
def __init__(self, category, train_params, hyper):
    self.run = neptune.init(
        project='apalese/TestingNNRetrieval',
        api_token='eyJhcGlfYWRkcmVzcyI6Imh0dHBzOi8vYXBwLm5lcHR1bmUuYWkiLCJhcGlfdXJsIjoiaHR0cHM6Ly9hcHAubmVwdHVuZS5haSIsImFwaV9rZXkiOiI1YmE0NjU5Ni1lZDk4LTRkMmEtYjIwYS02YjM5NGJmYWNlMmQifQ==',
    )
    self.run["Targets"] = f"{category}"
    self.run["JIRA"] = "NPT-952"
    self.run["algorithm"] = "TripletLossTest"
    self.run["parameters"] = hyper
    self.run["train parameters"] = train_params
    self.recalls = {
        1: 'test/Recall_1',
        5: 'test/Recall_5',
        10: 'test/Recall_10',
        20: 'test/Recall_20',
    }
    self.update_recalls = {
        1: 'test/score1',
        5: 'test/score5',
        10: 'test/score10',
        20: 'test/score20',
    }
def test_del(self):
    exp = init(mode="debug", flush_period=0.5)
    exp["some/num/val"].assign(3)
    self.assertIn("some", exp.get_structure())
    ns = exp["some"]
    del ns["num/val"]
    self.assertNotIn("some", exp.get_structure())
def __init__(self):
    super().__init__()
    self.exp = neptune.init(source_files="alpha_integration_dev/*.py")

    # download sources
    self.exp.sync()
    with self.with_check_if_file_appears("files.zip"):
        self.exp[SOURCE_CODE_FILES_ATTRIBUTE_PATH].download()
def test_define_few_variables(self):
    exp = init(mode="debug", flush_period=0.5)
    exp.define("some/path/num", Float(3))
    exp.define("some/path/text", String("Some text"), wait=True)
    self.assertEqual(exp.get_structure()["some"]["path"]["num"].fetch(), 3)
    self.assertEqual(exp.get_structure()["some"]["path"]["text"].fetch(), "Some text")
def test_offline_mode_for_run(self):
    run = init(name=self.PROJECT_NAME, mode="offline")
    run["some/variable"] = 13
    with self.assertRaises(NeptuneOfflineModeFetchException):
        run["some/variable"].fetch()
    self.assertIn(str(run._id), os.listdir(".neptune/offline"))
    self.assertIn("data-1.log", os.listdir(".neptune/offline/{}".format(run._id)))
def test_run_as_handler(self):
    exp = init(mode="debug", flush_period=0.5)
    exp.define("some/path/num", Float(3))
    exp.define("some/path/text", String("Some text"))
    handler = exp["some/path"]
    exp.wait()
    self.assertEqual(handler["num"].fetch(), 3)
    self.assertEqual(handler["text"].fetch(), "Some text")
def test_offline_mode(self):
    exp = init(mode="offline")
    exp["some/variable"] = 13
    with self.assertRaises(NeptuneOfflineModeFetchException):
        exp["some/variable"].fetch()
    self.assertIn(str(exp._id), os.listdir(".neptune/offline"))
    self.assertIn("data-1.log", os.listdir(".neptune/offline/{}".format(exp._id)))
def test_entrypoint(self):
    exp = init(mode="debug")
    self.assertEqual(exp["source_code/entrypoint"].fetch(), "main.py")

    exp = init(mode="debug", source_files=[])
    self.assertEqual(exp["source_code/entrypoint"].fetch(), "main.py")

    exp = init(mode="debug", source_files=["../*"])
    self.assertEqual(exp["source_code/entrypoint"].fetch(), "main_dir/main.py")

    exp = init(mode="debug", source_files=["internal/*"])
    self.assertEqual(exp["source_code/entrypoint"].fetch(), "main.py")

    exp = init(mode="debug", source_files=["../other_dir/*"])
    self.assertEqual(exp["source_code/entrypoint"].fetch(), "../main_dir/main.py")
def activate_monitoring(user, project):
    """
    Return a valid Neptune run instance.

    :param user: name of the Neptune.ai user
    :param project: project to log metrics into
    :return: neptune.new run object
    """
    return neptune.init(project=f'{user}/{project}', source_files=['*.py', 'requirements.txt'])
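# A minimal usage sketch for activate_monitoring; the user/project names and logged values
# are placeholders, not taken from the original code:
run = activate_monitoring("my-user", "my-project")
run["parameters"] = {"lr": 1e-3, "batch_size": 32}
for epoch_loss in (0.9, 0.5, 0.3):
    run["train/loss"].log(epoch_loss)
run.stop()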