def test_library():
    """The Library section parses identically from the default index and each
    explicit index-file variant (mi2/mi3/mi4); only the README text differs."""
    cases = [
        ("tests/test-mi/18_library",
         "**AllenNLP** is a Natural Language Processing library "),
        ("tests/test-mi/18_library/mi2.yml",
         "**AllenNLP** is a great Natural Language Processing library "),
        ("tests/test-mi/18_library/mi3.yml",
         "**AllenNLP** is a great Natural Language Processing library "),
        ("tests/test-mi/18_library/mi4.yml",
         "**AllenNLP** is a great Natural Language Processing library "),
    ]
    for path, readme_snippet in cases:
        mi = modelindex.load(path)
        lib = mi.library
        assert lib is not None
        assert lib.name == "AllenNLP"
        assert lib.repository == "https://github.com/allenai/allennlp-models"
        assert lib.headline == "An Apache 2.0 NLP research library, built on PyTorch, for developing state-of-the-art deep learning models on a wide variety of linguistic tasks."
        assert lib.website == "https://allennlp.org/"
        assert lib.docs == "https://docs.allennlp.org/"
        assert lib.image == "images/allennlp.png"
        assert readme_snippet in lib.readme_content()
def test_collections_load():
    """An index containing collections exposes typed model/collection lists."""
    mi = modelindex.load("tests/test-mi/03_col")
    assert "Models" in mi.data
    assert isinstance(mi.models, ModelList)
    assert isinstance(mi.collections, CollectionList)
# NOTE(review): duplicate test name — `test_results_check` is redefined twice
# later in this file, so pytest only collects the last definition and this one
# never runs; it should be renamed (e.g. `test_model_str`) — confirm intent.
# NOTE(review): the second expected-repr literal (the `"""Model(` at the very
# end of this line) is unterminated — the remainder of the triple-quoted string
# appears to have been lost when the file's whitespace was mangled. The missing
# text must be restored from version control; it cannot be reconstructed from
# what is visible here, so the code is left byte-identical.
def test_results_check(): mi = modelindex.load("tests/test-mi/06_import_meta/") s = str(mi.models[0].metadata) assert "Epochs=90" in s s = str(mi.models[1].metadata) assert "Epochs=120" in s assert "custom={'my custom parameter': 'abc'}" in s s = str(mi.models[0]) assert s == """Model( Name=Inception v3 - 90 epochs, Metadata=Metadata( Epochs=90, _filepath=tests/test-mi/06_import_meta/meta/meta1.yaml ), Results=[ Result( Task=Image Classification, Dataset=ImageNet, Metrics={'Top 1 Accuracy': '74.67%', 'Top 5 Accuracy': '92.1%'}, _filepath=tests/test-mi/06_import_meta/r1.yaml ), ], Weights=https://download.pytorch.org/models/inception_v3_google-1a9a5a14.pth, In Collection=Inception v3, _filepath=tests/test-mi/06_import_meta/m1.yml )""" s = str(mi.models[1]) assert s == """Model(
def test_wildcard_model_import():
    """Each wildcard-import index variant resolves the expected model and
    collection counts."""
    expected = [
        ("tests/test-mi/13_wildcard_model_imports", 2, 1),
        ("tests/test-mi/13_wildcard_model_imports/mi2.yml", 2, 0),
        ("tests/test-mi/13_wildcard_model_imports/mi3.yml", 2, 1),
        ("tests/test-mi/13_wildcard_model_imports/mi4.yml", 2, 1),
        ("tests/test-mi/13_wildcard_model_imports/mi5.yml", 0, 2),
        ("tests/test-mi/13_wildcard_model_imports/mi6.yml", 2, 1),
        ("tests/test-mi/13_wildcard_model_imports/mi7.yml", 2, 1),
    ]
    for path, n_models, n_collections in expected:
        mi = modelindex.load(path)
        assert len(mi.models) == n_models
        assert len(mi.collections) == n_collections
    # `mi` is the mi7.yml index here: check its first model in detail.
    assert mi.models[0].name == "Inception v3 - 90 epochs"
    assert len(mi.models[0].results) == 2
    assert mi.models[0].results[0].metrics["Top 1 Accuracy"] == "11%"
    assert mi.models[0].results[1].metrics["Top 1 Accuracy"] == "21%"
def test_readme_content():
    """Inline and file-based READMEs resolve per model; a missing README file
    is reported by check()."""
    mi = modelindex.load("tests/test-mi/15_readme_contents/")
    assert len(mi.models) == 2
    assert mi.models[0].readme_content() == "This is some custom readme here."
    assert mi.models[1].readme_content() == "Another custom readme"

    mi = modelindex.load("tests/test-mi/15_readme_contents/mi2.yml")
    err = mi.check(silent=True)
    assert len(err) == 1
    assert "docs/inception-v3-readme.md" in err[0]
    assert len(mi.collections) == 1
    assert len(mi.models) == 2
    assert "something here." in mi.models[0].readme_content()
    assert "# Second here" in mi.models[1].readme_content()
def test_yml_head():
    """A YAML head block in a markdown README defines a single model."""
    mi = modelindex.load("tests/test-mi/19_yml_head/readme.md")
    assert len(mi.models) == 1
    model = mi.models[0]
    assert model.name == "my model name"
    assert len(model.results) == 1
    result = model.results[0]
    assert result.task == "Speech Recognition"
    assert result.dataset == "Common Voice en"
    assert result.metrics == {"Test WER": 10}

    # Without a Name field the model's name stays unset.
    mi = modelindex.load("tests/test-mi/19_yml_head/readme-noname.md")
    assert len(mi.models) == 1
    model = mi.models[0]
    assert model.name is None
    assert len(model.results) == 1
    result = model.results[0]
    assert result.task == "Speech Recognition"
    assert result.dataset == "Common Voice en"
    assert result.metrics == {"Test WER": 20}
def test_images_check():
    """Image paths on collections and models are validated by check().

    Renamed from ``test_results_check``: that name is defined two more times in
    this file, so this (shadowed) definition was never collected by pytest.
    """
    mi = modelindex.load("tests/test-mi/16_images/")
    assert len(mi.collections) == 1
    assert len(mi.models) == 2
    err = mi.check(silent=True)
    assert len(err) == 2
    assert "Image file nonexistant.png" in err[0]
    assert mi.collections[0].image == "images/image.png"

    mi = modelindex.load("tests/test-mi/16_images/mi2.yml")
    # Re-run check() on the freshly loaded index. The original code kept
    # asserting on the stale `err` list from the first load, which made the
    # next two asserts tautological no-ops.
    err = mi.check(silent=True)
    assert len(err) == 2
    assert "Image file nonexistant.png" in err[0]
    assert mi.models[0].image == "../images/image.png"
    assert mi.models[1].image == "http://somewhere.com/external"
def test_models_import_wildcard():
    """Wildcard-imported models keep their per-file README paths."""
    mi = modelindex.load("tests/test-mi/13_wildcard_model_imports/mi8.yml")
    err = mi.check(silent=True)
    assert len(err) == 1
    assert "docs/inception-v3-readme.md" in err[0]
    assert len(mi.collections) == 1
    assert len(mi.models) == 2
    assert mi.models[0].readme == "models_md/m1.md"
    assert mi.models[1].readme == "models_md/m2.md"

    mi = modelindex.load("tests/test-mi/13_wildcard_model_imports/mi9.yml")
    assert len(mi.collections) == 1
    assert len(mi.models) == 2
    assert mi.models[0].readme == "models_md/m1.md"
    assert mi.models[1].readme == "models_md/m2.md"
def test_col_merge():
    """full_model merges collection defaults with per-model overrides."""
    mi = modelindex.load("tests/test-mi/17_collections_merge")
    m1 = mi.models[0].full_model
    m2 = mi.models[1].full_model
    assert m1.metadata.training_data == "ImageNet"
    assert m2.metadata.training_data == "Reddit"
    assert len(m1.metadata.training_techniques) == 4
    assert len(m2.metadata.training_techniques) == 5
    assert m2.metadata.training_techniques[-1] == "Transformers"
    assert m1.readme == "docs/inception-v3-readme.md"
    assert m2.readme == "docs/inception-v3-readme-120.md"

    mi = modelindex.load("tests/test-mi/17_collections_merge/mi2.yml")
    m1 = mi.models[0].full_model
    m2 = mi.models[1].full_model
    assert len(m1.results) == 2
    assert len(m2.results) == 2
    assert m1.results[0].metrics["Top 1 Accuracy"] == "11%"
    assert m2.results[0].metrics["Top 1 Accuracy"] == "11%"
    assert m1.results[1].metrics["Top 1 Accuracy"] == "74.67%"
    assert m2.results[1].metrics["Top 1 Accuracy"] == "75.1%"

    mi = modelindex.load("tests/test-mi/17_collections_merge/mi3.yml")
    err = mi.check(silent=True)
    assert len(err) == 2
    assert "Inception v3-1" in err[0]
    m1 = mi.models[0].full_model
    m2 = mi.models[1].full_model
    assert m1.metadata.training_data is None
    assert m2.metadata.training_data == "Reddit"
def test_models_imports_json():
    """Models can be imported from JSON files."""
    mi = modelindex.load("tests/test-mi/08_import_models_json")
    assert "Models" in mi.data
    assert isinstance(mi.models, ModelList)
    assert len(mi.models) == 2
    assert mi.models[0].metadata.data == {"Epochs": 90}
    assert mi.models[1].metadata.data == {"Epochs": 111}
def test_any_file():
    """load() accepts any supported file and returns the matching object type,
    and raises ValueError for an unrecognized file."""
    m1 = modelindex.load("tests/test-mi/07_import_models/model1.yaml")
    assert isinstance(m1, Model)
    assert m1.metadata.epochs == 90
    assert len(m1.results) == 1

    m2 = modelindex.load("tests/test-mi/07_import_models/model2.yaml")
    assert isinstance(m2, Model)
    assert m2.metadata.epochs == 120
    assert len(m2.results) == 1

    mi = modelindex.load("tests/test-mi/11_markdown/subdir/rexnet3.md")
    assert isinstance(mi, ModelIndex)
    assert mi.models[0].name == "RexNet3"

    rl = modelindex.load("tests/test-mi/09_check/results_list.yaml")
    assert isinstance(rl, ResultList)
    assert len(rl) == 3
    assert rl[1].metrics["Top 1 Accuracy"] == "70.67%"
    assert len(rl[2].check_errors) == 1
    assert "Metrics" in rl[2].check_errors[0]

    with pytest.raises(ValueError):
        modelindex.load("tests/test-mi/09_check/invalid_file.yml")
def test_models_imports():
    """Models imported from separate YAML files land in the model list."""
    mi = modelindex.load("tests/test-mi/07_import_models")
    assert "Models" in mi.data
    assert isinstance(mi.models, ModelList)
    assert len(mi.models) == 2
    assert len(mi.collections) == 1
    assert mi.models[0].metadata.data == {"Epochs": 90}
    assert mi.models[1].metadata.data == {"Epochs": 120}
def test_metadata_imports():
    """Metadata imported from external files merges into each model."""
    mi = modelindex.load("tests/test-mi/06_import_meta")
    assert "Models" in mi.data
    assert isinstance(mi.models, ModelList)
    assert len(mi.models) == 2
    assert mi.models[0].metadata.data == {"Epochs": 90}
    assert mi.models[1].metadata.data == {
        "Epochs": 120,
        "my custom parameter": "abc",
    }
def test_examples():
    """The shipped example indexes load with the expected contents."""
    vision = "https://github.com/pytorch/vision/blob/5a315453da5089d66de94604ea49334a66552524/torchvision/models"

    mi = modelindex.load("examples/option1-markdown")
    assert len(mi.collections) == 2
    assert len(mi.models) == 3
    assert mi.collections[0].name == "AlexNet"
    assert mi.collections[1].name == "ResNet"
    assert mi.models[0].code == vision + "/alexnet.py#L53"
    assert mi.models[1].code == vision + "/resnet.py#L304"
    assert mi.models[2].code == vision + "/resnet.py#L316"

    mi = modelindex.load("examples/option2-yaml")
    assert len(mi.collections) == 2
    assert len(mi.models) == 3
    assert mi.collections[0].name == "AlexNet"
    assert mi.collections[1].name == "ResNet"
    assert mi.models[0].metadata.flops == 1429383808
    assert mi.models[1].metadata.flops == 15667943424
    assert mi.models[2].metadata.flops == 23117674496
def test_model_markdown():
    """Models defined in markdown files are collected with their metadata."""
    mi = modelindex.load("tests/test-mi/11_markdown")
    assert len(mi.models) == 6
    # The first three models follow the same pattern: name + "some field".
    for idx, (name, some_field) in enumerate(
        [("RexNet", 10), ("RexNet1", 11), ("RexNet2", 22)]
    ):
        assert mi.models[idx].name == name
        assert mi.models[idx].metadata.data["some field"] == some_field
    assert mi.models[3].name == "RexNet3"
    assert mi.models[3].results[0].metrics == {"mAP": "19%"}
    assert mi.models[4].name == "Inception v3 - 90 epochs"
    assert mi.models[5].name == "Inception v3 - 120 epochs"
def generate_readmes(templates_path: Path, dest_path: Path) -> None:
    """Render each model README template and write the result to *dest_path*.

    For every ``*.md`` template under ``templates_path / "models"``, the first
    model name found in the template's model-index header is injected as the
    ``model_name`` template variable, and the rendered text is written to
    ``dest_path`` under the same file name.

    Args:
        templates_path: Directory containing the ``models/`` template folder
            and shared templates (e.g. ``code_snippets.md``).
        dest_path: Output directory for the rendered READMEs.
    """
    readme_templates_path = templates_path / "models"
    # Search both the model-template folder and its parent so that shared
    # snippet templates can be included from the model templates.
    env = Environment(
        loader=FileSystemLoader(
            [readme_templates_path, readme_templates_path.parent]
        ),
    )
    for readme in readme_templates_path.iterdir():
        if readme.suffix != ".md":
            continue
        template = env.get_template(readme.name)
        # Use the first model's name to represent the model family.
        mi = modelindex.load(str(readme))
        model_name = mi.models[0].name
        full_content = template.render(model_name=model_name)
        # Explicit encoding: the original relied on the platform default,
        # which can corrupt non-ASCII content on some systems.
        (dest_path / readme.name).write_text(full_content, encoding="utf-8")
def test_deepcopy():
    """deepcopy() yields a fully independent model object (no shared state)."""
    mi = modelindex.load("tests/test-mi/03_col")
    original = mi.models[0]
    clone = copy.deepcopy(original)

    clone.name = "New name"
    assert original.name != clone.name
    assert clone.name == "New name"

    clone.results[0].task = "New task"
    assert original.results[0].task != clone.results[0].task
    assert clone.results[0].task == "New task"

    clone.results.data.append(Result(task="", dataset="", metrics={}))
    assert len(original.results) == 1
    assert len(clone.results) == 2

    clone.metadata.flops = 10
    assert original.metadata.flops != clone.metadata.flops
    assert clone.metadata.flops == 10
def test_result_imports():
    """Result fields imported from external files merge into each model."""
    mi = modelindex.load("tests/test-mi/05_field_imports")
    assert "Models" in mi.data
    assert isinstance(mi.models, ModelList)
    assert len(mi.models) == 2
    assert mi.models[0].results[0].data == {
        "Task": "Image Classification",
        "Dataset": "ImageNet",
        "Metrics": {
            "Top 1 Accuracy": "74.67%",
            "Top 5 Accuracy": "92.1%",
        },
    }
    assert mi.models[1].results[0].data == {
        "Task": "Image Classification",
        "Dataset": "ImageNet",
        "Metrics": {
            "Top 1 Accuracy": "75.1%",
            "Top 5 Accuracy": "93.1%",
        },
    }
def test_common_dict_list_errors():
    """The 14_common fixture still loads into usable values."""
    mi = modelindex.load("tests/test-mi/14_common")
    assert mi.models[1].metadata.epochs == 130
    first_result = mi.models[0].results[0]
    assert first_result.task == "Image Classification"
    assert first_result.metrics["Top 1 Accuracy"] == "11.67%"
def test_results_check():
    """check() and check_errors report missing fields and broken references
    across the 09_check fixtures."""
    # mi1: the result is missing its dataset.
    mi = modelindex.load("tests/test-mi/09_check/mi1.yml")
    e = mi.models[0].results[0].check_errors
    assert len(e) == 1
    assert "dataset" in e[0].lower()
    msgs = mi.check(silent=True)
    assert len(msgs) == 1
    assert "Results[0]" in msgs[0]

    # mi2: a fully valid result has no errors.
    mi = modelindex.load("tests/test-mi/09_check/mi2.yml")
    assert len(mi.models[0].results[0].check_errors) == 0

    # mi3: missing dataset and metrics.
    mi = modelindex.load("tests/test-mi/09_check/mi3.yml")
    e = mi.models[0].results[0].check_errors
    assert len(e) == 2
    assert "dataset" in e[0].lower()
    assert "metrics" in e[1].lower()

    # mi4: result-level errors do not appear on metadata or the model itself.
    mi = modelindex.load("tests/test-mi/09_check/mi4.yml")
    e = mi.models[0].results[0].check_errors
    assert len(e) == 2
    assert "dataset" in e[0].lower()
    assert "metrics" in e[1].lower()
    assert len(mi.models[0].metadata.check_errors) == 0
    assert len(mi.models[0].check_errors) == 0

    # mi5: a broken file reference on the model.
    mi = modelindex.load("tests/test-mi/09_check/mi5.yml")
    model_errors = mi.models[0].check_errors
    assert len(model_errors) == 1
    assert "what" in model_errors[0]
    assert "not exist" in model_errors[0]
    msgs = mi.check(silent=True)
    assert len(msgs) == 3

    # mi6: multiple broken references, including README and a result list.
    mi = modelindex.load("tests/test-mi/09_check/mi6.yml")
    model_errors = mi.models[0].check_errors
    assert len(model_errors) == 2
    assert "what" in model_errors[0]
    assert "not exist" in model_errors[0]
    assert "README" in model_errors[1]
    result_errors = mi.models[0].results.check_errors
    assert len(result_errors) == 1
    assert "django" in result_errors[0]
    assert "not exist" in result_errors[0]

    # mi7: errors reported on the index object itself.
    mi = modelindex.load("tests/test-mi/09_check/mi7.yml")
    assert len(mi.check_errors) == 1
    assert "not exist" in mi.check_errors[0]

    # Imported metadata with broken references.
    mi = modelindex.load("tests/test-mi/10_import_meta_check")
    assert len(mi.models) == 2
    assert len(mi.collections) == 1
    assert mi.models[0].filepath.endswith("meta/m1.yml")
    msgs = mi.check(silent=True)
    assert "noexist.json" in msgs[0]
    assert "wrongsubdir" in msgs[1]

    # mi8: a single broken reference on the model.
    mi = modelindex.load("tests/test-mi/09_check/mi8.yml")
    model_errors = mi.models[0].check_errors
    assert len(model_errors) == 1
    assert "what" in model_errors[0]
    assert "not exist" in model_errors[0]
def test_singlefile():
    """A minimal single-file index loads into a ModelList."""
    mi = modelindex.load("tests/test-mi/01_base")
    assert "Models" in mi.data
    assert isinstance(mi.models, ModelList)
def test_wildcard_import():
    """Wildcard imports pull in every matching model file."""
    mi = modelindex.load("tests/test-mi/12_wildcard_imports")
    assert len(mi.models) == 2
def test_imports():
    """An index with Import directives resolves all referenced models."""
    mi = modelindex.load("tests/test-mi/04_imports")
    assert "Models" in mi.data
    assert isinstance(mi.models, ModelList)
    assert len(mi.models) == 2