def main():
    """Train a regularized photonic circuit, report its final parameters,
    then sample bit strings from the trained circuit.

    The strength of the regularizer is controlled by the
    'regularization_strength' hyperparameter. Setting 'plot' automatically
    plots some default values as well as the monitored circuit parameters
    (requires matplotlib).
    """
    hyperparams = {
        'circuit': circuit_with_cost_function,
        'init_circuit_params': my_init_params,
        'task': 'optimization',
        'loss': myloss,
        'regularizer': myregularizer,
        'regularization_strength': 0.5,
        'optimizer': 'SGD',
        'init_learning_rate': 1e-1,
        'log_every': 1,
        'plot': True
    }

    learner = CircuitLearner(hyperparams=hyperparams)
    learner.train_circuit(steps=50)

    # final_params is a dictionary mapping parameter names to trained values.
    final_params = learner.get_circuit_parameters()
    for name, value in final_params.items():
        print("Parameter {} has the final value {}.".format(name, value))

    n_qmodes = 4

    # Collect the trained parameters in the fixed order expected by
    # get_bits_from_circuit: 4 squeeze, 4 displacement, then the 8
    # beamsplitter parameters (two per mode).
    param_keys = (
        ["regularized/squeeze_{}".format(i) for i in range(n_qmodes)]
        + ["regularized/displacement_{}".format(i) for i in range(n_qmodes)]
        + ["regularized/bs_{}{}".format(i, j)
           for i in range(n_qmodes) for j in range(2)]
    )
    final_params_translated = [final_params[key] for key in param_keys]

    # Sample the trained circuit (a single shot here; increase the range to
    # gather more samples) and tally the resulting bit strings.
    all_results = []
    for _ in range(1):
        bits = get_bits_from_circuit(A, n_qmodes, final_params_translated)
        all_results.append(",".join(str(bit) for bit in bits))

    print(Counter(all_results))
# Example 2
 def get_circuit_params(self, steps):
     """Train a numeric circuit learner for `steps` steps and return the
     trained value of the 'regularized/dummy' parameter."""
     trained = CircuitLearnerNUM(hyperparams=self.hyperp)
     trained.train_circuit(X=self.X, Y=self.Y, steps=steps)
     return trained.get_circuit_parameters()['regularized/dummy']
# Example 3
 def get_cost(self, steps):
     """Train a numeric circuit learner for `steps` steps, score it on the
     same data, and return the resulting loss."""
     trained = CircuitLearnerNUM(hyperparams=self.hyperp)
     trained.train_circuit(X=self.X, Y=self.Y, steps=steps)
     metrics = trained.score_circuit(X=self.X, Y=self.Y)
     return metrics['loss']
# Example 4
# We also print out the results every 10th step.
# Finally, we choose a warm start: it loads the final parameters from the
# previous training, so the global step resumes where it ended the last time
# you ran the script.
hyperparams = {
    'circuit': circuit,
    'init_circuit_params': my_init_params,
    'task': 'supervised',
    'loss': myloss,
    'optimizer': 'SGD',
    'init_learning_rate': 0.5,
    'decay': 0.01,
    'log_every': 10,
    'warm_start': False,  # Set this to True after the first run
}

# Build the learner and fit it to the training data.
learner = CircuitLearner(hyperparams=hyperparams)
learner.train_circuit(X=X_train, Y=Y_train, steps=steps, batch_size=batch_size)

# score_circuit() returns a dictionary of metrics; report accuracy and loss
# on the held-out test set.
test_score = learner.score_circuit(X=X_test, Y=Y_test,
                                   outputs_to_predictions=outputs_to_predictions)
print("\nAccuracy on test set: {}".format(test_score['accuracy']))
print("Loss on test set: {}".format(test_score['loss']))

# Run the trained circuit on unseen inputs to obtain predictions.
predictions = learner.run_circuit(X=X_pred,
                                  outputs_to_predictions=outputs_to_predictions)
print("\nPredictions for new inputs: ", predictions['outputs'])
# Example 5
# Training data: four two-dimensional samples.
X_train = np.array([[0, 1], [0, 2], [0, 3], [0, 4]])

# Hyperparameters of the model and the (derivative-free) training algorithm.
hyperparams = {'circuit': circuit,
               'init_circuit_params': my_params,
               'task': 'unsupervised',
               'optimizer': 'Nelder-Mead',
               'loss': myloss,
               'regularizer': myregularizer,
               'regularization_strength': 0.1,
               'print_log': True,
               'log_every': 100}

# Build the learner and train it on the unlabeled data.
learner = CircuitLearner(hyperparams=hyperparams)
learner.train_circuit(X=X_train, steps=500)

# The circuit output is the final distribution learned by the model.
final_distribution = learner.run_circuit()['outputs']

# Draw Fock-state samples from the learned state; they should show a
# distribution similar to the training data.
for idx in range(10):
    sample = sample_from_distribution(distribution=final_distribution)
    print("Fock state sample {}:{}".format(idx, sample))
# Example 6
    state = eng.run('gaussian')

    # As the output we take the probability of measuring one photon in the mode
    circuit_output = state.fock_prob([1])
    return circuit_output


# Loss on the output of circuit(): the negated probability of measuring |1>,
# so that minimizing the loss drives that probability up.
def myloss(circuit_output):
    negated = -circuit_output
    return negated


# Hyperparameters: plain SGD on the optimization task defined above.
hyperparams = {'circuit': circuit,
               'init_circuit_params': my_init_params,
               'task': 'optimization',
               'loss': myloss,
               'optimizer': 'SGD',
               'init_learning_rate': 0.1}

# Build the learner and run 50 optimization steps.
learner = CircuitLearner(hyperparams=hyperparams)
learner.train_circuit(steps=50)
# Example 7
# The regularizer term is added to the model's loss, scaled by the
# 'regularization_strength' hyperparameter. Setting 'plot' automatically
# plots some default values as well as the monitored circuit parameters
# (requires matplotlib).
hyperparams = {'circuit': circuit,
               'init_circuit_params': my_init_params,
               'task': 'optimization',
               'loss': myloss,
               'regularizer': myregularizer,
               'regularization_strength': 0.5,
               'optimizer': 'SGD',
               'init_learning_rate': 0.1,
               'log_every': 1,
               'plot': True}

learner = CircuitLearner(hyperparams=hyperparams)
learner.train_circuit(steps=50)

# get_circuit_parameters() returns a dictionary; report each trained value.
final_params = learner.get_circuit_parameters()
for param_name, param_value in final_params.items():
    print("Parameter {} has the final value {}.".format(param_name, param_value))

# Look in the 'logsNUM' directory, there should be a file called 'log.csv' that records what happened to alpha
# during training. Play around with the 'regularization_strength' and see how a large strength forces alpha to zero.