def test_config(make_model):
  # Disable the redefined-outer-name violation in this function
  # pylint: disable=redefined-outer-name
  model = make_model
  expected_num_parameters = model.layers[0].count_params()

  # Serialize model and use config to create new layer
  model_config = model.get_config()
  layer_config = model_config['layers'][1]['config']

  if 'mpo' in model.layers[0].name:
    new_model = DenseMPO.from_config(layer_config)
  elif 'decomp' in model.layers[0].name:
    new_model = DenseDecomp.from_config(layer_config)
  elif 'condenser' in model.layers[0].name:
    new_model = DenseCondenser.from_config(layer_config)
  elif 'expander' in model.layers[0].name:
    new_model = DenseExpander.from_config(layer_config)
  elif 'entangler' in model.layers[0].name:
    new_model = DenseEntangler.from_config(layer_config)

  # Build the layer so we can count params below
  new_model.build(layer_config['batch_input_shape'])

  # Check that original layer had same num params as layer built from config
  np.testing.assert_equal(expected_num_parameters, new_model.count_params())
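
# Minimal sketch (not part of the test suite) of the get_config()/from_config()
# round trip that test_config exercises, shown here with a stock tf.keras Dense
# layer. The helper name `_config_roundtrip_demo` is hypothetical. A layer
# rebuilt from its config has no weights until build() is called, which is why
# the test above builds the new layer before comparing parameter counts.
def _config_roundtrip_demo():
  import tensorflow as tf

  layer = tf.keras.layers.Dense(8)
  layer.build((None, 4))
  clone = tf.keras.layers.Dense.from_config(layer.get_config())
  clone.build((None, 4))
  assert layer.count_params() == clone.count_params()
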
def test_entangler_asymmetric_num_parameters_output_shape(num_legs,
                                                          num_levels,
                                                          leg_dims):
  leg_dim, out_leg_dim = leg_dims
  data_shape = (leg_dim**num_legs,)

  model = Sequential()
  model.add(
      DenseEntangler(out_leg_dim**num_legs,
                     num_legs=num_legs,
                     num_levels=num_levels,
                     use_bias=True,
                     activation='relu',
                     input_shape=data_shape))

  primary = leg_dim
  secondary = out_leg_dim
  if leg_dim > out_leg_dim:
    primary, secondary = secondary, primary

  expected_num_parameters = (num_levels - 1) * (num_legs - 1) * (
      primary**4) + (num_legs - 2) * primary**3 * secondary + (
          primary**2 * secondary**2) + out_leg_dim**num_legs

  np.testing.assert_equal(expected_num_parameters, model.count_params())

  data = np.random.randint(10, size=(10, data_shape[0]))
  out = model(data)
  np.testing.assert_equal(out.shape, (data.shape[0], out_leg_dim**num_legs))
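
# Illustrative sketch (not part of the library or the test suite): the
# expected-parameter arithmetic from the test above, restated as a standalone
# helper. The name `_expected_asymmetric_entangler_params` is hypothetical.
# Presumably the count breaks down as: (num_levels - 1) levels of
# (num_legs - 1) cores with primary**4 weights each, a final level that mixes
# in the output leg dimension, and out_leg_dim**num_legs bias terms.
def _expected_asymmetric_entangler_params(num_legs, num_levels, leg_dim,
                                          out_leg_dim):
  primary, secondary = leg_dim, out_leg_dim
  if leg_dim > out_leg_dim:
    primary, secondary = secondary, primary
  return ((num_levels - 1) * (num_legs - 1) * primary**4 +
          (num_legs - 2) * primary**3 * secondary +
          primary**2 * secondary**2 +
          out_leg_dim**num_legs)
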
def make_high_dim_model(high_dim_data, request):
  # Disable the redefined-outer-name violation in this function
  # pylint: disable=redefined-outer-name
  data = high_dim_data
  if request.param == 'DenseMPO':
    model = Sequential()
    model.add(
        DenseMPO(data.shape[-1],
                 num_nodes=int(math.log(int(data.shape[-1]), 8)),
                 bond_dim=8,
                 use_bias=True,
                 activation='relu',
                 input_shape=(data.shape[-1],)))
  elif request.param == 'DenseDecomp':
    model = Sequential()
    model.add(
        DenseDecomp(512,
                    decomp_size=128,
                    use_bias=True,
                    activation='relu',
                    input_shape=(data.shape[-1],)))
  elif request.param == 'DenseCondenser':
    model = Sequential()
    model.add(
        DenseCondenser(exp_base=2,
                       num_nodes=3,
                       use_bias=True,
                       activation='relu',
                       input_shape=(data.shape[-1],)))
  elif request.param == 'DenseExpander':
    model = Sequential()
    model.add(
        DenseExpander(exp_base=2,
                      num_nodes=3,
                      use_bias=True,
                      activation='relu',
                      input_shape=(data.shape[-1],)))
  elif request.param == 'DenseEntangler':
    num_legs = 3
    leg_dim = round(data.shape[-1]**(1. / num_legs))
    assert leg_dim**num_legs == data.shape[-1]

    model = Sequential()
    model.add(
        DenseEntangler(leg_dim**num_legs,
                       num_legs=num_legs,
                       num_levels=3,
                       use_bias=True,
                       activation='relu',
                       input_shape=(data.shape[-1],)))
  return data, model
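
# Note: make_high_dim_model reads request.param, so in the original suite it is
# presumably registered as a parametrized pytest fixture, along the lines of:
#
#   @pytest.fixture(params=['DenseMPO', 'DenseDecomp', 'DenseCondenser',
#                           'DenseExpander', 'DenseEntangler'])
#   def make_high_dim_model(high_dim_data, request):
#       ...
#
# The decorator shown here is an assumption; only the function body appears
# above.
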
def test_entangler_num_parameters(dummy_data):
  # Disable the redefined-outer-name violation in this function
  # pylint: disable=redefined-outer-name
  data, _ = dummy_data

  num_legs = 3
  num_levels = 3
  leg_dim = round(data.shape[-1]**(1. / num_legs))
  assert leg_dim**num_legs == data.shape[-1]

  model = Sequential()
  model.add(
      DenseEntangler(leg_dim**num_legs,
                     num_legs=num_legs,
                     num_levels=num_levels,
                     use_bias=True,
                     activation='relu',
                     input_shape=(data.shape[1],)))

  # num_params = entangler_node_params + bias_params
  expected_num_parameters = num_levels * (num_legs - 1) * (leg_dim**4) + (
      leg_dim**num_legs)

  np.testing.assert_equal(expected_num_parameters, model.count_params())
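
# Worked example of the count above (illustrative only; the concrete values
# are assumed, not taken from dummy_data): with num_levels = 3, num_legs = 3
# and leg_dim = 4, the entangler cores contribute 3 * (3 - 1) * 4**4 = 1536
# weights and the bias adds 4**3 = 64, so the layer should report 1600
# trainable parameters.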