def test_number_of_function_calls(
    self, fun, x_min, param, nums_freq, exp_num_calls, substep_optimizer, substep_kwargs
):
    """Tests that per parameter 2R+1 function calls are used for an update step."""
    global num_calls
    num_calls = 0

    @functools.wraps(fun)
    def _fun(*args, **kwargs):
        global num_calls
        num_calls += 1
        return fun(*args, **kwargs)

    opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)

    # Make only the first argument trainable
    param = (np.array(param[0], requires_grad=True),) + param[1:]
    # Only one argument is marked as trainable -> Expect only the executions for that arg
    new_param = opt.step(_fun, *param, nums_frequency=nums_freq)
    exp_num_calls_single_trainable = sum(2 * num + 1 for num in nums_freq["x"].values())
    assert num_calls == exp_num_calls_single_trainable

    num_calls = 0
    # All parameters are now marked as trainable -> Expect the full number of executions
    param = tuple(np.array(p, requires_grad=True) for p in param)
    new_param = opt.step(_fun, *param, nums_frequency=nums_freq)
    assert num_calls == exp_num_calls
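# A minimal sketch (not part of the original tests) of the 2R + 1 rule checked above:
# the cost restricted to a single parameter entry with R integer frequencies is a
# trigonometric polynomial with 2R + 1 free coefficients, so the univariate substep
# needs 2R + 1 evaluations. The helper name below is hypothetical and only
# illustrates the expected call count.
def _expected_rotosolve_calls(nums_frequency):
    """Sum 2R + 1 over all entries, e.g. {"x": {(): 2}, "y": {(): 1}} -> 8."""
    return sum(2 * R + 1 for freqs in nums_frequency.values() for R in freqs.values())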
def test_number_of_function_calls(
    self, fun, x_min, param, num_freq, optimizer, optimizer_kwargs
):
    """Tests that per parameter 2R+1 function calls are used for an update step."""
    global num_calls
    num_calls = 0

    def _fun(*args, **kwargs):
        global num_calls
        num_calls += 1
        return fun(*args, **kwargs)

    opt = RotosolveOptimizer()
    new_param = opt.step(
        _fun,
        *param,
        num_freqs=num_freq,
        optimizer=optimizer,
        optimizer_kwargs=optimizer_kwargs,
    )
    expected_num_calls = np.sum(
        np.fromiter(_flatten(expand_num_freq(num_freq, param)), dtype=int) * 2 + 1
    )
    assert num_calls == expected_num_calls
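# Note: this variant exercises an older call signature, in which the univariate
# optimizer is passed to ``step`` via ``optimizer``/``optimizer_kwargs``; the variant
# above instead passes ``substep_optimizer``/``substep_kwargs`` to the constructor.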
def test_single_step(self, qnode, param, num_freq, optimizer, optimizer_kwargs):
    """Test executing a single step of the RotosolveOptimizer on a QNode."""
    opt = RotosolveOptimizer()
    repack_param = len(param) == 1
    new_param_step = opt.step(
        qnode,
        *param,
        num_freqs=num_freq,
        optimizer=optimizer,
        optimizer_kwargs=optimizer_kwargs,
    )
    if repack_param:
        new_param_step = (new_param_step,)

    assert (np.isscalar(new_param_step) and np.isscalar(param)) or len(new_param_step) == len(param)

    new_param_step_and_cost, old_cost = opt.step_and_cost(
        qnode,
        *param,
        num_freqs=num_freq,
        optimizer=optimizer,
        optimizer_kwargs=optimizer_kwargs,
    )
    if repack_param:
        new_param_step_and_cost = (new_param_step_and_cost,)

    assert np.allclose(
        np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
        np.fromiter(_flatten(new_param_step), dtype=float),
    )
    assert np.isclose(qnode(*param), old_cost)
def test_full_output(self, fun, x_min, param, num_freq, optimizer, optimizer_kwargs):
    """Tests the ``full_output`` feature of Rotosolve, delivering intermediate
    cost function values at the univariate optimization substeps."""
    opt = RotosolveOptimizer()

    _, y_output_step = opt.step(
        fun,
        *param,
        num_freqs=num_freq,
        optimizer=optimizer,
        optimizer_kwargs=optimizer_kwargs,
        full_output=True,
    )
    new_param, old_cost, y_output_step_and_cost = opt.step_and_cost(
        fun,
        *param,
        num_freqs=num_freq,
        optimizer=optimizer,
        optimizer_kwargs=optimizer_kwargs,
        full_output=True,
    )
    # The following accounts for the unpacking functionality for length-1 param
    if len(param) == 1:
        new_param = (new_param,)

    expected_intermediate_x = successive_params(param, new_param)
    expected_y_output = [fun(*par) for par in expected_intermediate_x[1:]]

    assert np.allclose(y_output_step, expected_y_output)
    assert np.allclose(y_output_step_and_cost, expected_y_output)
    assert np.isclose(old_cost, fun(*expected_intermediate_x[0]))
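# ``successive_params`` is a helper assumed to be defined elsewhere in this module;
# it is expected to return the parameter configurations visited during one Rotosolve
# sweep, updating one (flattened) parameter entry at a time, so that ``full_output``
# yields one intermediate cost value per univariate substep.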
def test_single_step(
    self, qnode, param, nums_frequency, spectra, substep_optimizer, substep_kwargs
):
    """Test executing a single step of the RotosolveOptimizer on a QNode."""
    param = tuple(np.array(p, requires_grad=True) for p in param)
    opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)
    repack_param = len(param) == 1
    new_param_step = opt.step(
        qnode,
        *param,
        nums_frequency=nums_frequency,
        spectra=spectra,
    )
    if repack_param:
        new_param_step = (new_param_step,)

    assert (np.isscalar(new_param_step) and np.isscalar(param)) or len(new_param_step) == len(param)

    new_param_step_and_cost, old_cost = opt.step_and_cost(
        qnode,
        *param,
        nums_frequency=nums_frequency,
        spectra=spectra,
    )
    if repack_param:
        new_param_step_and_cost = (new_param_step_and_cost,)

    assert np.allclose(
        np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
        np.fromiter(_flatten(new_param_step), dtype=float),
    )
    assert np.isclose(qnode(*param), old_cost)
def test_wrong_typed_num_freqs(fun, param, num_freq):
    """Test that an error is raised for a non-integer entry in the numbers of frequencies."""
    opt = RotosolveOptimizer()
    with pytest.raises(ValueError, match="The numbers of frequencies are expected to be integers."):
        opt.step(fun, *param, num_freqs=num_freq)
def test_wrong_len_num_freqs(fun, param, num_freq):
    """Test that an error is raised if the number of provided frequency counts
    differs from the number of function arguments."""
    opt = RotosolveOptimizer()
    with pytest.raises(ValueError, match="The length of the provided numbers of frequencies"):
        opt.step(fun, *param, num_freqs=num_freq)
def test_wrong_num_of_num_freqs_per_parameter(fun, param, num_freq):
    """Test that an error is raised if the number of frequency counts provided
    for a single parameter does not match the size of that parameter."""
    opt = RotosolveOptimizer()
    with pytest.raises(ValueError, match="The number of the frequency counts"):
        opt.step(fun, *param, num_freqs=num_freq)
def test_single_step_convergence(
    self, fun, x_min, param, nums_freq, exp_num_calls, substep_optimizer, substep_kwargs
):
    """Tests convergence for easy classical functions in a single Rotosolve step.
    Includes testing of the parameter output shape and the old cost when using
    step_and_cost."""
    opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)

    # Make only the first argument trainable
    param = (np.array(param[0], requires_grad=True),) + param[1:]
    # Only one argument is marked as trainable -> All other arguments have to stay fixed
    new_param_step = opt.step(
        fun,
        *param,
        nums_frequency=nums_freq,
    )
    # The following accounts for the unpacking functionality for length-1 param
    if len(param) == 1:
        new_param_step = (new_param_step,)

    assert all(np.allclose(p, new_p) for p, new_p in zip(param[1:], new_param_step[1:]))

    # With trainable parameters, training should happen
    param = tuple(np.array(p, requires_grad=True) for p in param)
    new_param_step = opt.step(
        fun,
        *param,
        nums_frequency=nums_freq,
    )
    # The following accounts for the unpacking functionality for length-1 param
    if len(param) == 1:
        new_param_step = (new_param_step,)

    assert len(x_min) == len(new_param_step)
    assert np.allclose(
        np.fromiter(_flatten(x_min), dtype=float),
        np.fromiter(_flatten(new_param_step), dtype=float),
        atol=1e-5,
    )

    # Now with step_and_cost and trainable params
    new_param_step_and_cost, old_cost = opt.step_and_cost(
        fun,
        *param,
        nums_frequency=nums_freq,
    )
    # The following accounts for the unpacking functionality for length-1 param
    if len(param) == 1:
        new_param_step_and_cost = (new_param_step_and_cost,)

    assert len(x_min) == len(new_param_step_and_cost)
    assert np.allclose(
        np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
        np.fromiter(_flatten(new_param_step), dtype=float),
        atol=1e-5,
    )
    assert np.isclose(old_cost, fun(*param))
def test_error_missing_frequency_info():
    """Test that an error is raised if neither nums_frequency nor spectra is given."""
    opt = RotosolveOptimizer()
    fun = lambda x: x
    x = np.array(0.5, requires_grad=True)
    with pytest.raises(ValueError, match="Neither the number of frequencies nor the"):
        opt.step(fun, x)
def test_error_no_trainable_args():
    """Test that an error is raised if none of the arguments is trainable."""
    opt = RotosolveOptimizer()
    fun = lambda x, y, z: 1.0
    x = np.arange(4, requires_grad=False)
    y = np.arange(2, requires_grad=False)
    z = [1.2, -0.4, -9.1]
    with pytest.raises(ValueError, match="Found no parameters to optimize."):
        opt.step(fun, x, y, z, nums_frequency=None, spectra=None)
def test_no_error_missing_frequency_info_untrainable():
    """Test that no error is raised if neither nums_frequency nor spectra
    is given for a parameter that is not marked as trainable."""
    opt = RotosolveOptimizer()
    fun = lambda x, y: x
    x = np.array(0.5, requires_grad=True)
    y = np.array(0.1, requires_grad=False)
    nums_frequency = {"x": {(): 1}}
    opt.step(fun, x, y, nums_frequency=nums_frequency)
def test_error_missing_frequency_info_single_par():
    """Test that an error is raised if neither nums_frequency nor spectra
    is given for one of the entries of a function argument."""
    opt = RotosolveOptimizer()
    fun = lambda x: qml.math.sum(x)
    x = np.arange(4, requires_grad=True)
    nums_frequency = {"x": {(0,): 1, (1,): 1}}
    spectra = {"x": {(0,): [0.0, 1.0], (2,): [0.0, 1.0]}}
    # For the first three entries either nums_frequency or spectra is provided
    with pytest.raises(ValueError, match=r"was provided for the entry \(3,\)"):
        opt.step(fun, x, nums_frequency=nums_frequency, spectra=spectra)
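# For reference, both metadata dictionaries map argument names to dictionaries keyed
# by parameter-index tuples: ``nums_frequency`` stores an integer frequency count per
# entry, while ``spectra`` stores the frequencies themselves (including 0.0), e.g.
#     nums_frequency = {"x": {(0,): 1}}
#     spectra = {"x": {(1,): [0.0, 1.0]}}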
class A:
    """Container holding one instance of each optimizer under test."""

    sgd_opt = GradientDescentOptimizer(stepsize)
    mom_opt = MomentumOptimizer(stepsize, momentum=gamma)
    nesmom_opt = NesterovMomentumOptimizer(stepsize, momentum=gamma)
    adag_opt = AdagradOptimizer(stepsize)
    rms_opt = RMSPropOptimizer(stepsize, decay=gamma)
    adam_opt = AdamOptimizer(stepsize, beta1=gamma, beta2=delta)
    rotosolve_opt = RotosolveOptimizer()
    rotoselect_opt = RotoselectOptimizer()
def test_multiple_steps(self, qnode, param, num_freq, optimizer, optimizer_kwargs):
    """Test executing multiple steps of the RotosolveOptimizer on a QNode."""
    opt = RotosolveOptimizer()
    repack_param = len(param) == 1
    initial_cost = qnode(*param)

    for _ in range(3):
        param = opt.step(
            qnode,
            *param,
            num_freqs=num_freq,
            optimizer=optimizer,
            optimizer_kwargs=optimizer_kwargs,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if repack_param:
            param = (param,)

    assert qnode(*param) < initial_cost
def test_single_step_convergence(
    self, fun, x_min, param, num_freq, optimizer, optimizer_kwargs
):
    """Tests convergence for easy classical functions in a single Rotosolve step.
    Includes testing of the parameter output shape and the old cost when using
    step_and_cost."""
    opt = RotosolveOptimizer()
    new_param_step = opt.step(
        fun,
        *param,
        num_freqs=num_freq,
        optimizer=optimizer,
        optimizer_kwargs=optimizer_kwargs,
    )
    # The following accounts for the unpacking functionality for length-1 param
    if len(param) == 1:
        new_param_step = (new_param_step,)

    assert len(x_min) == len(new_param_step)
    assert np.allclose(
        np.fromiter(_flatten(x_min), dtype=float),
        np.fromiter(_flatten(new_param_step), dtype=float),
        atol=1e-5,
    )

    new_param_step_and_cost, old_cost = opt.step_and_cost(
        fun,
        *param,
        num_freqs=num_freq,
        optimizer=optimizer,
        optimizer_kwargs=optimizer_kwargs,
    )
    # The following accounts for the unpacking functionality for length-1 param
    if len(param) == 1:
        new_param_step_and_cost = (new_param_step_and_cost,)

    assert len(x_min) == len(new_param_step_and_cost)
    assert np.allclose(
        np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
        np.fromiter(_flatten(new_param_step), dtype=float),
        atol=1e-5,
    )
    assert np.isclose(old_cost, fun(*param))
def test_single_step(self, fun, x_min, param, num_freq):
    """Tests convergence for easy classical functions in a single Rotosolve step
    with some arguments deactivated for training.
    Includes testing of the parameter output shape and the old cost when using
    step_and_cost."""
    substep_optimizer = "brute"
    substep_kwargs = None
    opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)

    new_param_step = opt.step(
        fun,
        *param,
        nums_frequency=num_freq,
    )
    # The following accounts for the unpacking functionality for length-1 param
    if len(param) == 1:
        new_param_step = (new_param_step,)

    assert len(x_min) == len(new_param_step)
    assert np.allclose(
        np.fromiter(_flatten(x_min), dtype=float),
        np.fromiter(_flatten(new_param_step), dtype=float),
        atol=1e-5,
    )

    new_param_step_and_cost, old_cost = opt.step_and_cost(
        fun,
        *param,
        nums_frequency=num_freq,
    )
    # The following accounts for the unpacking functionality for length-1 param
    if len(param) == 1:
        new_param_step_and_cost = (new_param_step_and_cost,)

    assert len(x_min) == len(new_param_step_and_cost)
    assert np.allclose(
        np.fromiter(_flatten(new_param_step_and_cost), dtype=float),
        np.fromiter(_flatten(new_param_step), dtype=float),
        atol=1e-5,
    )
    assert np.isclose(old_cost, fun(*param))
def test_multiple_steps(fun, x_min, param, num_freq):
    """Tests that repeated steps execute as expected."""
    param = tuple(np.array(p, requires_grad=True) for p in param)
    substep_optimizer = "brute"
    substep_kwargs = None
    opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)

    for _ in range(3):
        param = opt.step(
            fun,
            *param,
            nums_frequency=num_freq,
        )
        # The following accounts for the unpacking functionality for length-one param
        if len(x_min) == 1:
            param = (param,)

    assert (np.isscalar(x_min) and np.isscalar(param)) or len(x_min) == len(param)
    assert np.allclose(
        np.fromiter(_flatten(x_min), dtype=float),
        np.fromiter(_flatten(param), dtype=float),
        atol=1e-5,
    )
def test_multiple_steps(
    self, qnode, param, nums_frequency, spectra, substep_optimizer, substep_kwargs
):
    """Test executing multiple steps of the RotosolveOptimizer on a QNode."""
    param = tuple(np.array(p, requires_grad=True) for p in param)
    # For the following 1D substep optimizers, the bounds need to be expanded for these QNodes
    if substep_optimizer in ["shgo", custom_optimizer]:
        substep_kwargs["bounds"] = ((-2.0, 2.0),)
    opt = RotosolveOptimizer(substep_optimizer, substep_kwargs)
    repack_param = len(param) == 1
    initial_cost = qnode(*param)

    for _ in range(3):
        param = opt.step(
            qnode,
            *param,
            nums_frequency=nums_frequency,
            spectra=spectra,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if repack_param:
            param = (param,)

    assert qnode(*param) < initial_cost
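# ``shgo`` and the custom optimizer used here are generic scalar minimizers that
# require explicit search bounds; ``substep_kwargs`` is assumed to be passed through
# to the substep routine, which is why the bounds are widened above for these QNodes.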
def test_multiple_steps(fun, x_min, param, num_freq):
    """Tests that repeated steps execute as expected."""
    opt = RotosolveOptimizer()
    optimizer = "brute"
    optimizer_kwargs = None

    for _ in range(3):
        param = opt.step(
            fun,
            *param,
            num_freqs=num_freq,
            optimizer=optimizer,
            optimizer_kwargs=optimizer_kwargs,
        )
        # The following accounts for the unpacking functionality for length-1 param
        if len(x_min) == 1:
            param = (param,)

    assert (np.isscalar(x_min) and np.isscalar(param)) or len(x_min) == len(param)
    assert np.allclose(
        np.fromiter(_flatten(x_min), dtype=float),
        np.fromiter(_flatten(param), dtype=float),
        atol=1e-5,
    )
def opt(opt_name):
    """Return an optimizer instance for the given short name."""
    if opt_name == "gd":
        return GradientDescentOptimizer(stepsize)
    if opt_name == "nest":
        return NesterovMomentumOptimizer(stepsize, momentum=gamma)
    if opt_name == "moment":
        return MomentumOptimizer(stepsize, momentum=gamma)
    if opt_name == "ada":
        return AdagradOptimizer(stepsize)
    if opt_name == "rms":
        return RMSPropOptimizer(stepsize, decay=gamma)
    if opt_name == "adam":
        return AdamOptimizer(stepsize, beta1=gamma, beta2=delta)
    if opt_name == "roto":
        return RotosolveOptimizer()
def reset(opt):
    """Reset an optimizer's state if it supports resetting."""
    if getattr(opt, "reset", None):
        opt.reset()


@pytest.mark.parametrize(
    "opt, opt_name",
    [
        (GradientDescentOptimizer(stepsize), "gd"),
        (MomentumOptimizer(stepsize, momentum=gamma), "moment"),
        (NesterovMomentumOptimizer(stepsize, momentum=gamma), "nest"),
        (AdagradOptimizer(stepsize), "ada"),
        (RMSPropOptimizer(stepsize, decay=gamma), "rms"),
        (AdamOptimizer(stepsize, beta1=gamma, beta2=delta), "adam"),
        (RotosolveOptimizer(), "roto"),
    ],
)
class TestOverOpts:
    """Tests keywords, multiple arguments, and non-training arguments in relevant optimizers."""

    def test_kwargs(self, mocker, opt, opt_name, tol):
        """Test that the keywords get passed and alter the function"""

        class func_wrapper:
            @staticmethod
            def func(x, c=1.0):
                return (x - c) ** 2

        x = 1.0
        wrapper = func_wrapper()
        spy = mocker.spy(wrapper, "func")