def test_011_success_download(self):
    """Check a model is re-downloaded when its bundled file is absent.

    The bundled model is moved aside to force ``resolve_model`` to fetch
    it again; the original file is restored afterwards.
    """
    name = 'r941_min_high_g351'
    model_file = models.resolve_model(name)
    tmp_file = "{}.tmp".format(model_file)
    # Move the bundled model out of the way to trigger a download.
    os.rename(model_file, tmp_file)
    try:
        new_file = models.resolve_model(name)
        self.assertTrue(os.path.isfile(new_file))
        os.remove(new_file)
    finally:
        # Always restore the original model so a failed download or a
        # failed assertion cannot break subsequent tests.
        os.rename(tmp_file, model_file)
def test_000_total_bundled_size(self):
    """Verify the combined on-disk size of bundled models is below 45 MiB."""
    bytes_per_mib = 1024 * 1024
    sizes = [
        os.path.getsize(models.resolve_model(name))
        for name in medaka.options.current_models]
    self.assertLess(
        sum(sizes) / bytes_per_mib, 45,
        "Bundled model file size too large")
def test_999_load_all_models(self):
    """Open every allowed model and check the model and its metadata types."""
    for model_name in medaka.options.allowed_models:
        resolved = models.resolve_model(model_name)
        with medaka.models.open_model(resolved) as ds:
            self.assertIsInstance(
                ds.load_model(), tensorflow.keras.models.Model)
            self.assertIsInstance(
                ds.get_meta('feature_encoder'), BaseFeatureEncoder)
            self.assertIsInstance(
                ds.get_meta('label_scheme'), BaseLabelScheme)
def test_998_load_all_models_datastore(self):
    """Load every allowed model and fetch its inference metadata.

    NOTE(review): this method previously duplicated the name
    ``test_999_load_all_models``; the later definition silently shadowed
    the earlier one so only one of the two ever ran. Renamed so both
    tests execute. The bare ``open_model(...).load_model()`` call is
    also replaced with the context-manager form so the underlying file
    handle is closed deterministically.
    """
    for name in medaka.options.allowed_models:
        model_file = models.resolve_model(name)
        with medaka.models.open_model(model_file) as ds:
            model = ds.load_model()
        self.assertIsInstance(model, tensorflow.keras.models.Model)
        # Check we can get necessary functions for inference
        with DataStore(model_file) as ds:
            feature_encoder = ds.get_meta('feature_encoder')
            self.assertIsInstance(feature_encoder, BaseFeatureEncoder)
            label_scheme = ds.get_meta('label_scheme')
            self.assertIsInstance(label_scheme, BaseLabelScheme)
def test_010_failed_download(self):
    """Check ``resolve_model`` raises DownloadError for an unfetchable model.

    A bogus model name is temporarily added to the allowed list; the
    cleanup runs in ``finally`` because a failing ``assertRaises``
    raises out of the ``with`` block and would otherwise skip the
    original ``pop()``, leaving 'garbage' in the shared global list.
    """
    name = 'garbage'
    medaka.options.allowed_models.append(name)
    try:
        with self.assertRaises(medaka.models.DownloadError):
            models.resolve_model(name)
    finally:
        # remove(name) rather than pop(): robust even if another test
        # appended to the list in the meantime.
        medaka.options.allowed_models.remove(name)