def test_li_wand_2016_nst_smoke(subtests, mocker, content_image, style_image):
    """Smoke test: paper.nst forwards the expected arguments to the optimization."""
    input_image_spy = mocker.patch(
        mocks.make_mock_target("li_wand_2016", "_nst", "misc", "get_input_image"),
        wraps=get_input_image,
    )
    optimization_mock = mocker.patch(
        mocks.make_mock_target(
            "li_wand_2016", "_nst", "optim", "pyramid_image_optimization"
        )
    )
    hyper_parameters = paper.hyper_parameters()

    paper.nst(content_image, style_image)

    positional, keyword = optimization_mock.call_args
    input_image, criterion, pyramid = positional
    get_optimizer = keyword["get_optimizer"]
    preprocessor = keyword["preprocessor"]
    postprocessor = keyword["postprocessor"]

    # The bottom pyramid level defines the initial working resolution.
    initial_resize = pyramid[-1].resize_image

    with subtests.test("input_image"):
        spy_args = utils.call_args_to_namespace(
            input_image_spy.call_args, get_input_image
        )
        assert spy_args.starting_point == hyper_parameters.nst.starting_point
        assert extract_image_size(spy_args.content_image) == extract_image_size(
            initial_resize(content_image)
        )

    with subtests.test("style_image"):
        desired_style_image = preprocessor(initial_resize(style_image))
        for loss in criterion.style_loss.children():
            ptu.assert_allclose(loss.target_image, desired_style_image)

    with subtests.test("criterion"):
        assert isinstance(criterion, type(paper.perceptual_loss()))

    with subtests.test("pyramid"):
        assert isinstance(pyramid, type(paper.image_pyramid()))

    with subtests.test("optimizer"):
        assert is_callable(get_optimizer)
        optimizer = get_optimizer(input_image)
        assert isinstance(optimizer, type(paper.optimizer(input_image)))

    with subtests.test("preprocessor"):
        assert isinstance(preprocessor, type(paper.preprocessor()))

    with subtests.test("postprocessor"):
        assert isinstance(postprocessor, type(paper.postprocessor()))
def test_get_preprocessor_deprecation(mocker, framework):
    """enc.get_preprocessor should warn (deprecation) and delegate to preprocessing."""
    preprocessing_mock = mocker.patch(
        mocks.make_mock_target("enc", "prepostprocessing", "preprocessing")
    )

    with pytest.warns(UserWarning):
        enc.get_preprocessor(framework)

    preprocessing_mock.assert_called_with(framework)
def test_response_code(self, mocker, test_image_url, code, reason):
    """download_file should translate an HTTPError into a RuntimeError."""
    http_error = HTTPError(test_image_url, code, reason, {}, None)
    mocker.patch(make_mock_target("misc", "urlopen"), side_effect=http_error)

    with pytest.raises(RuntimeError):
        misc.download_file(test_image_url)
def test_repr(self, pretrained, mocker, multi_layer_encoder_cls):
    """The repr should reflect the configuration the encoder was built with."""
    # Prevent any state-dict download when pretrained=True.
    mocker.patch(
        mocks.make_mock_target(
            "enc",
            "models",
            "utils",
            "ModelMultiLayerEncoder",
            "load_state_dict_from_url",
        )
    )

    framework = "framework"
    internal_preprocessing = False
    allow_inplace = True
    make_encoder = functools.partial(
        multi_layer_encoder_cls,
        framework=framework,
        internal_preprocessing=internal_preprocessing,
        allow_inplace=allow_inplace,
    )

    encoder_repr = repr(make_encoder(pretrained=pretrained))
    assert_property = functools.partial(
        asserts.assert_property_in_repr, encoder_repr
    )

    if pretrained:
        assert_property("framework", framework)
    else:
        assert_property("pretrained", False)
    assert_property("internal_preprocessing", internal_preprocessing)
    assert_property("allow_inplace", allow_inplace)
def multi_layer_encoder(package_mocker):
    """Patch the ulyanov_et_al_2016 multi-layer encoder loader."""
    target = mocks.make_mock_target(
        "ulyanov_et_al_2016", "_loss", "_multi_layer_encoder"
    )
    return mocks.patch_multi_layer_encoder_loader(
        targets=target,
        loader=paper.multi_layer_encoder,
        setups=((), {}),
        mocker=package_mocker,
    )
def test_MultiLayerEncoder_empty_storage(mocker, mle):
    """empty_storage should warn (deprecation) and delegate to clear_cache."""
    clear_cache_mock = mocker.patch(
        mocks.make_mock_target(
            "enc", "multi_layer_encoder", "MultiLayerEncoder", "clear_cache"
        )
    )

    with pytest.warns(UserWarning):
        mle.empty_storage()

    clear_cache_mock.assert_called()
def patch_optim_loops(mocker):
    """Replace every builtin optimization loop with a pass-through stub."""

    def passthrough(input, *args, **kwargs):
        # Return the input unchanged so downstream code keeps working.
        return input

    loop_names = (
        "default_image_optim_loop",
        "default_image_pyramid_optim_loop",
        "default_transformer_optim_loop",
        "default_transformer_epoch_optim_loop",
    )
    for loop_name in loop_names:
        mocker.patch(
            mocks.make_mock_target("optim", loop_name), side_effect=passthrough
        )

    # Since the beginner example "NST without pystiche" does not use a builtin
    # optimization loop we are patching the optimizer. The actual computation happens
    # inside a closure. Thus, the loop will run, albeit with an almost empty body.
    mocker.patch(mocks.make_mock_target("optim", "LBFGS", pkg="torch"))
def test_vgg_pretrained(mocker):
    """pretrained=True should load exactly the state dict served by the hub."""
    state_dict = vgg(pretrained=False).state_dict()
    mocker.patch(
        mocks.make_mock_target(
            "enc", "models", "vgg", "hub", "load_state_dict_from_url"
        ),
        return_value=state_dict,
    )

    pretrained_model = vgg(pretrained=True)

    ptu.assert_allclose(pretrained_model.state_dict(), state_dict)
def multi_layer_encoder(package_mocker):
    """Patch the johnson_alahi_li_2016 multi-layer encoder loader."""
    target = mocks.make_mock_target(
        "johnson_alahi_li_2016", "_loss", "_multi_layer_encoder"
    )
    return mocks.patch_multi_layer_encoder_loader(
        targets=target,
        loader=paper.multi_layer_encoder,
        setups=((), {"impl_params": True}),
        mocker=package_mocker,
    )
def patch_model_optimization():
    """Stub the model optimization loops: run the criterion once, return the transformer."""

    def fake_model_optimization(image_loader, transformer, criterion, *args, **kwargs):
        # Pull a single batch through the criterion so the loss graph is exercised.
        batch = next(image_loader)
        criterion(batch)
        return transformer

    # NOTE(review): `mocker` is not a parameter here — presumably captured from an
    # enclosing fixture scope; confirm against the surrounding file.
    for target_name in ("model_optimization", "multi_epoch_model_optimization"):
        mocker.patch(
            mocks.make_mock_target("optim", target_name),
            side_effect=fake_model_optimization,
        )
def patch_image_optimization():
    """Stub the image optimization loops: run the criterion once, return the input."""

    def fake_image_optimization(input_image, criterion, *args, **kwargs):
        # Evaluate the criterion once so the loss graph is exercised.
        criterion(input_image)
        return input_image

    # NOTE(review): `mocker` is not a parameter here — presumably captured from an
    # enclosing fixture scope; confirm against the surrounding file.
    for target_name in ("image_optimization", "pyramid_image_optimization"):
        mocker.patch(
            mocks.make_mock_target("optim", target_name),
            side_effect=fake_image_optimization,
        )
def multi_layer_encoder(package_mocker):
    """Patch every gatys_et_al_2017 alias of the multi-layer encoder loader."""
    paths = (
        ("multi_layer_encoder",),
        ("_loss", "_multi_layer_encoder"),
        ("_utils", "multi_layer_encoder_"),
    )
    targets = [mocks.make_mock_target("gatys_et_al_2017", *path) for path in paths]
    return mocks.patch_multi_layer_encoder_loader(
        targets=targets,
        loader=paper.multi_layer_encoder,
        setups=((), {}),
        mocker=package_mocker,
    )
def patch_models_load_state_dict_from_url(package_mocker):
    """Disable weight downloads, including torchvision's internal VGG loader."""
    mocks.patch_models_load_state_dict_from_url(mocker=package_mocker)

    # Since the beginner example "NST without pystiche" does not use a builtin
    # multi-layer encoder we are patching the model loader inplace.
    original_vgg_loader = _vgg

    def vgg_loader_without_weights(
        arch, cfg, batch_norm, pretrained, progress, **kwargs
    ):
        # Force pretrained=False so no state dict is ever fetched.
        return original_vgg_loader(arch, cfg, batch_norm, False, progress, **kwargs)

    package_mocker.patch(
        mocks.make_mock_target("models", "vgg", "_vgg", pkg="torchvision"),
        new=vgg_loader_without_weights,
    )
def patch_optimizer_step():
    """Patch torch.optim.LBFGS.step so the closure is evaluated only once."""
    # Single-slot cache for the first closure evaluation; later steps reuse it.
    cached_loss = []

    def step_once(closure):
        if not cached_loss:
            cached_loss.append(closure())
        return cached_loss[0]

    # NOTE(review): `mocker` is not a parameter here — presumably captured from an
    # enclosing fixture scope; confirm against the surrounding file.
    mocker.patch(
        mocks.make_mock_target("optim", "LBFGS", "step", pkg="torch"),
        side_effect=step_once,
    )
def multi_layer_encoder(package_mocker):
    """Patch the gatys_ecker_bethge_2016 multi-layer encoder loader for all setups."""
    # Default call plus both explicit impl_params variants.
    setups = [
        ((), {}),
        ((), dict(impl_params=True)),
        ((), dict(impl_params=False)),
    ]
    paths = (
        ("_loss", "_multi_layer_encoder"),
        ("_utils", "multi_layer_encoder_"),
    )
    return mocks.patch_multi_layer_encoder_loader(
        targets=[
            mocks.make_mock_target("gatys_ecker_bethge_2016", *path) for path in paths
        ],
        loader=paper.multi_layer_encoder,
        setups=setups,
        mocker=package_mocker,
    )
def test_ModelMultiLayerEncoder_pretrained(mocker, multi_layer_encoder_cls):
    """Instantiating with pretrained=True should load weights for the framework."""
    load_state_dict_from_url = mocker.patch(
        mocks.make_mock_target(
            "enc",
            "models",
            "utils",
            "ModelMultiLayerEncoder",
            "load_state_dict_from_url",
        )
    )

    framework = "framework"
    multi_layer_encoder_cls(
        pretrained=True, framework=framework, internal_preprocessing=False
    )

    load_state_dict_from_url.assert_called_once_with(framework)
def test_RandomNumIterationsBatchSampler(mocker):
    """The sampler should yield num_iterations // batch_size full batches."""

    def fake_randint(high, size, *args, **kwargs):
        # The sampler is expected to draw scalars only.
        if size:
            raise pytest.UsageError
        return torch.tensor(high, dtype=torch.long)

    mocker.patch(
        mocks.make_mock_target("data", "utils", "torch", "randint"),
        side_effect=fake_randint,
    )

    data_source = [None] * 3
    num_iterations = 6
    batch_size = 2
    batch_sampler = utils.RandomNumIterationsBatchSampler(
        data_source, num_iterations, batch_size=batch_size, drop_last=True
    )

    actual = tuple(iter(batch_sampler))
    num_batches = num_iterations // batch_size
    expected = tuple([len(data_source)] * batch_size for _ in range(num_batches))
    assert actual == expected
def multi_layer_encoder_cls(
    mocker,
    multi_layer_encoder_urls,
    multi_layer_encoder_modules,
    multi_layer_encoder_state_dict_key_map,
):
    """Provide a concrete ModelMultiLayerEncoder subclass backed by the fixtures."""

    class MockModelMultiLayerEncoder(enc.ModelMultiLayerEncoder):
        def state_dict_url(self, framework: str) -> str:
            return enc.select_url(multi_layer_encoder_urls, framework)

        def collect_modules(self, inplace: bool):
            # Copy so repeated instantiations do not share module objects.
            return (
                copy(multi_layer_encoder_modules),
                multi_layer_encoder_state_dict_key_map,
            )

    # Serve a matching state dict instead of downloading one.
    state_dict = MockModelMultiLayerEncoder(pretrained=False).state_dict()
    mocker.patch(
        mocks.make_mock_target(
            "enc", "models", "utils", "hub", "load_state_dict_from_url"
        ),
        return_value=state_dict,
    )

    return MockModelMultiLayerEncoder
def test_image_download(mocker, image):
    """Downloading through a rate-limited urlopen should still yield a correct image."""
    mocker.patch(make_mock_target("misc", "urlopen"), wraps=rate_limited_urlopen)

    assert_image_downloads_correctly(image)
def save_state_dict(package_mocker):
    """Patch utils.save_state_dict and hand the mock to the test."""
    target = make_mock_target("utils", "save_state_dict")
    return package_mocker.patch(target)
def write_image(package_mocker):
    """Patch pystiche.image.write_image and hand the mock to the test."""
    target = make_mock_target("image", "write_image", pkg="pystiche")
    return package_mocker.patch(target)
def mock(**kwargs):
    """Patch _cli.write_image, forwarding any mock configuration."""
    # NOTE(review): `mocker` is not a parameter here — presumably captured from an
    # enclosing fixture scope; confirm against the surrounding file.
    target = make_mock_target("_cli", "write_image")
    return mocker.patch(target, **kwargs)
def mock(**kwargs):
    """Patch _cli.image_optimization, forwarding any mock configuration."""
    # NOTE(review): `mocker` is not a parameter here — presumably captured from an
    # enclosing fixture scope; confirm against the surrounding file.
    target = make_mock_target("_cli", "image_optimization")
    return mocker.patch(target, **kwargs)
def patch(target, **kwargs):
    """Patch an attribute of johnson_alahi_li_2016._modules and yield the mock."""
    full_target = make_mock_target("johnson_alahi_li_2016", "_modules", target)
    with unittest.mock.patch(full_target, **kwargs) as patched:
        yield patched
def patch_matplotlib_figures(package_mocker):
    """Suppress figure display while the examples run."""
    package_mocker.patch(mocks.make_mock_target("image", "show_image"))
    package_mocker.patch(
        mocks.make_mock_target("pyplot", "new_figure_manager", pkg="matplotlib")
    )