Example No. 1
def test_config(make_model):
  # Disable the redefined-outer-name violation in this function
  # pylint: disable=redefined-outer-name
  model = make_model

  expected_num_parameters = model.layers[0].count_params()

  # Serialize model and use config to create new layer
  model_config = model.get_config()
  layer_config = model_config['layers'][0]['config']
  if 'mpo' in model.layers[0].name:
    new_model = DenseMPO.from_config(layer_config)
  elif 'decomp' in model.layers[0].name:
    new_model = DenseDecomp.from_config(layer_config)
  elif 'condenser' in model.layers[0].name:
    new_model = DenseCondenser.from_config(layer_config)
  elif 'expander' in model.layers[0].name:
    new_model = DenseExpander.from_config(layer_config)
  elif 'entangler' in model.layers[0].name:
    new_model = DenseEntangler.from_config(layer_config)

  # Build the layer so we can count params below
  new_model.build(layer_config['batch_input_shape'])

  # Check that original layer had same num params as layer built from config
  np.testing.assert_equal(expected_num_parameters, new_model.count_params())
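
The test relies on the standard Keras serialization round trip: get_config() returns a plain dict, from_config() rebuilds an equivalent but unbuilt layer, and the layer must be built before count_params() can be called. A minimal sketch of the same round trip with a stock tf.keras Dense layer (the dimensions 8 and 4 are arbitrary illustrative choices, not taken from the test):

import numpy as np
from tensorflow.keras.layers import Dense

# Build a plain Dense layer so its weights (and parameter count) exist.
layer = Dense(4, activation='relu')
layer.build((None, 8))  # 8 input features -> 8*4 kernel + 4 bias = 36 params

# Serialize to a config dict and rebuild an equivalent layer from it.
config = layer.get_config()
restored = Dense.from_config(config)
restored.build((None, 8))  # this config carries no input shape, so build explicitly

np.testing.assert_equal(layer.count_params(), restored.count_params())  # both 36

In the test above the config does contain a batch_input_shape key because the tn_keras layers were constructed with input_shape= inside a Sequential model, so the rebuilt layer can be built directly from layer_config['batch_input_shape'].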
Example No. 2
def make_model(dummy_data, request):
  # Disable the redefined-outer-name violation in this function
  # pylint: disable=redefined-outer-name
  data, _ = dummy_data

  if request.param == 'DenseMPO':
    model = Sequential()
    model.add(
        DenseMPO(data.shape[1],
                 num_nodes=int(math.log(int(data.shape[1]), 8)),
                 bond_dim=8,
                 use_bias=True,
                 activation='relu',
                 input_shape=(data.shape[1],)))
    model.add(Dense(1, activation='sigmoid'))
  elif request.param == 'DenseDecomp':
    model = Sequential()
    model.add(
        DenseDecomp(512,
                    decomp_size=128,
                    use_bias=True,
                    activation='relu',
                    input_shape=(data.shape[1],)))
    model.add(Dense(1, activation='sigmoid'))
  elif request.param == 'DenseCondenser':
    model = Sequential()
    model.add(
        DenseCondenser(exp_base=2,
                       num_nodes=3,
                       use_bias=True,
                       activation='relu',
                       input_shape=(data.shape[1],)))
    model.add(Dense(1, activation='sigmoid'))
  elif request.param == 'DenseExpander':
    model = Sequential()
    model.add(
        DenseExpander(exp_base=2,
                      num_nodes=3,
                      use_bias=True,
                      activation='relu',
                      input_shape=(data.shape[-1],)))
    model.add(Dense(1, activation='sigmoid'))
  elif request.param == 'DenseEntangler':
    num_legs = 3
    leg_dim = round(data.shape[-1]**(1. / num_legs))
    assert leg_dim**num_legs == data.shape[-1]

    model = Sequential()
    model.add(
        DenseEntangler(leg_dim**num_legs,
                       num_legs=num_legs,
                       num_levels=3,
                       use_bias=True,
                       activation='relu',
                       input_shape=(data.shape[1],)))
    model.add(Dense(1, activation='sigmoid'))

  return model
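
make_model reads as a parametrized pytest fixture: request.param carries the layer name for the current parametrization, and dummy_data is itself a fixture yielding a (data, labels) pair. A sketch of how the surrounding decorators could look; the exact parameter list, the dummy_data shapes, and the 64-feature choice are assumptions for illustration, not taken from the snippet:

import numpy as np
import pytest


@pytest.fixture
def dummy_data():
  # Hypothetical stand-in: 64 samples with 64 features and binary labels,
  # chosen so that 64 = 8**2 (DenseMPO branch) and 64 = 4**3 (DenseEntangler
  # branch) both hold.
  data = np.random.rand(64, 64).astype(np.float32)
  labels = np.random.randint(0, 2, size=(64,))
  return data, labels


@pytest.fixture(params=[
    'DenseMPO', 'DenseDecomp', 'DenseCondenser', 'DenseExpander',
    'DenseEntangler'
])
def make_model(dummy_data, request):
  ...  # body as in Example No. 2; request.param selects which branch builds the model

Any test that takes make_model as an argument then runs once per entry in params, which is how test_config in Example No. 1 exercises every layer type.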
Example No. 3
def test_expander_num_parameters(dummy_data):
  # Disable the redefined-outer-name violation in this function
  # pylint: disable=redefined-outer-name
  data, _ = dummy_data
  exp_base = 2
  num_nodes = 3
  model = Sequential()
  model.add(
      DenseExpander(exp_base=exp_base,
                    num_nodes=num_nodes,
                    use_bias=True,
                    activation='relu',
                    input_shape=(data.shape[-1],)))

  output_dim = data.shape[-1] * (exp_base**num_nodes)

  # num_params = (num_nodes * num_node_params) + num_bias_params
  expected_num_parameters = (num_nodes * data.shape[-1] * data.shape[-1] *
                             exp_base) + output_dim

  np.testing.assert_equal(expected_num_parameters, model.count_params())
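
Plugging concrete numbers into the expected-parameter formula makes the count easy to check by hand. Assuming an input dimension of 4 (an illustrative choice; the test takes it from dummy_data), with exp_base = 2 and num_nodes = 3:

# Worked numbers (hypothetical shapes): 4 input features, exp_base=2, num_nodes=3.
in_dim, exp_base, num_nodes = 4, 2, 3

output_dim = in_dim * exp_base**num_nodes              # 4 * 8 = 32
node_params = num_nodes * in_dim * in_dim * exp_base   # 3 * 4 * 4 * 2 = 96
bias_params = output_dim                               # one bias per output unit

expected_num_parameters = node_params + bias_params    # 96 + 32 = 128
print(expected_num_parameters)  # 128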