def test_evaluate_named_model(data_dir, mocker):
    x = numpy.random.randint(256, size=(100, 48, 48, 3)).astype(numpy.float64)
    y = numpy.random.randint(4, size=(100,))

    # Per-channel means for a named model are read from "<name>_means.csv".
    meanscsv = str(data_dir.join("zoolander_means.csv"))
    with open(meanscsv, "w") as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow([125.3, 127.12, 121.9])

    # Preprocessing subtracts each channel mean, shifts by 255, and divides by 2 * 255.
    expected_samples = x.copy()
    expected_samples[:, :, :, 0] = (expected_samples[:, :, :, 0] - 125.3 + 255.0) / (2.0 * 255.0)
    expected_samples[:, :, :, 1] = (expected_samples[:, :, :, 1] - 127.12 + 255.0) / (2.0 * 255.0)
    expected_samples[:, :, :, 2] = (expected_samples[:, :, :, 2] - 121.9 + 255.0) / (2.0 * 255.0)

    expected_targets = keras.utils.to_categorical(y, 4)

    with mocker.patch("keras_resnet.models.ResNet50") as model_mock:
        keras_resnet.models.ResNet50.return_value = model_mock

        # Redirect packaged resources (means CSV, checkpoint) to the temporary data_dir.
        resources = mocker.patch("pkg_resources.resource_filename")
        resources.side_effect = lambda _, filename: str(data_dir.join(os.path.basename(filename)))

        model = deepometry.model.Model(shape=(48, 48, 3), units=4, name="zoolander")
        model.compile()
        model.evaluate(x, y, batch_size=10, verbose=0)

        # Named models resolve their checkpoint via pkg_resources (redirected to data_dir here).
        model_mock.load_weights.assert_called_once_with(
            pkg_resources.resource_filename("deepometry", os.path.join("data", "zoolander_checkpoint.hdf5"))
        )

        model_mock.evaluate.assert_called_once_with(x=mocker.ANY, y=mocker.ANY, batch_size=10, verbose=0)

        _, kwargs = model_mock.evaluate.call_args

        samples = kwargs["x"]
        assert samples.shape == expected_samples.shape
        numpy.testing.assert_array_equal(samples, expected_samples)

        targets = kwargs["y"]
        assert targets.shape == expected_targets.shape
        numpy.testing.assert_array_equal(targets, expected_targets)
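# A minimal sketch (not part of deepometry's API) of the per-channel normalization the
# named-model test expects: subtract each channel mean from the means CSV, shift by 255,
# and divide by 2 * 255. The helper name `_rescale_channels` is hypothetical and only
# restates the expected-sample arithmetic above in one place.
def _rescale_channels(x, means):
    rescaled = x.copy()
    for channel, mean in enumerate(means):
        rescaled[:, :, :, channel] = (rescaled[:, :, :, channel] - mean + 255.0) / (2.0 * 255.0)
    return rescaled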
def test_evaluate_named_directory(data_dir, mocker):
    x = numpy.random.randint(256, size=(100, 48, 48, 3)).astype(numpy.float64)
    y = numpy.random.randint(4, size=(100,))

    # Per-channel means for a directory-based model are read from "<directory>/means.csv".
    model_directory = data_dir.mkdir("models")
    meanscsv = str(model_directory.join("means.csv"))
    with open(meanscsv, "w") as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow([125.3, 127.12, 121.9])

    # Each channel mean is subtracted from the corresponding channel of the samples.
    expected_samples = x.copy()
    expected_samples[:, :, :, 0] -= 125.3
    expected_samples[:, :, :, 1] -= 127.12
    expected_samples[:, :, :, 2] -= 121.9

    expected_targets = keras.utils.to_categorical(y, 4)

    with mocker.patch("keras_resnet.models.ResNet50") as model_mock:
        keras_resnet.models.ResNet50.return_value = model_mock

        model = deepometry.model.Model(shape=(48, 48, 3), units=4, directory=str(model_directory))
        model.compile()
        model.evaluate(x, y, batch_size=10, verbose=0)

        # Directory-based models load their checkpoint from "<directory>/checkpoint.hdf5".
        model_mock.load_weights.assert_called_once_with(
            os.path.join(str(model_directory), "checkpoint.hdf5")
        )

        model_mock.evaluate.assert_called_once_with(x=mocker.ANY, y=mocker.ANY, batch_size=10, verbose=0)

        _, kwargs = model_mock.evaluate.call_args

        samples = kwargs["x"]
        assert samples.shape == expected_samples.shape
        numpy.testing.assert_array_equal(samples, expected_samples)

        targets = kwargs["y"]
        assert targets.shape == expected_targets.shape
        numpy.testing.assert_array_equal(targets, expected_targets)
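# Both tests above rely on a `data_dir` fixture that behaves like pytest's py.path.local
# `tmpdir` (supporting `.join` and `.mkdir`). If the suite does not already provide one
# (e.g. in conftest.py), a minimal sketch would be:
#
#     import pytest
#
#     @pytest.fixture
#     def data_dir(tmpdir):
#         return tmpdir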
def _evaluate(x, y, units, batch_size, directory, name, verbose):
    """Compile a deepometry model matching the sample shape and class count,
    evaluate it on (x, y), and return the metric names alongside their values."""
    import deepometry.model

    model = deepometry.model.Model(directory=directory, name=name, shape=x.shape[1:], units=units)

    model.compile()

    metrics = model.evaluate(x, y, batch_size=batch_size, verbose=verbose)

    return model.model.metrics_names, metrics
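# A minimal usage sketch for `_evaluate`, assuming `x.npy` and `y.npy` hold samples and
# integer labels saved by an earlier step; the file paths, the 4-class setup, and the
# batch size are hypothetical, and directory/name are left as None so the model falls
# back to its defaults.
if __name__ == "__main__":
    import numpy

    x = numpy.load("x.npy")
    y = numpy.load("y.npy")

    names, values = _evaluate(x, y, units=4, batch_size=32, directory=None, name=None, verbose=1)

    # Keras may return a scalar when only the loss is reported; normalize to a list.
    if not isinstance(values, list):
        values = [values]

    for metric_name, value in zip(names, values):
        print("{}: {}".format(metric_name, value))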