Example #1
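# Note: this excerpt assumes that a variational circuit function (circuit), a
# loss function (myloss), a mapping from circuit outputs to predictions
# (outputs_to_predictions), the data arrays X_train, Y_train, X_test, Y_test,
# X_pred, and the training settings steps and batch_size are all defined
# earlier in the script, and that CircuitLearner has been imported.
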
hyperparams = {
    'circuit': circuit,
    'task': 'supervised',
    'loss': myloss,
    'optimizer': 'SGD',
    'init_learning_rate': 0.5,
    'decay': 0.01,
    'print_log': True,
    'log_every': 10,
    'warm_start': False
}

learner = CircuitLearner(hyperparams=hyperparams)

learner.train_circuit(X=X_train, Y=Y_train, steps=steps, batch_size=batch_size)

test_score = learner.score_circuit(
    X=X_test, Y=Y_test, outputs_to_predictions=outputs_to_predictions)
# The score_circuit() function returns a dictionary of different metrics.
print("\nPossible scores to print: {}".format(list(test_score.keys())))
# We select the accuracy and loss.
print("Accuracy on test set: ", test_score['accuracy'])
print("Loss on test set: ", test_score['loss'])

outcomes = learner.run_circuit(X=X_pred,
                               outputs_to_predictions=outputs_to_predictions)
# The run_circuit() function returns a dictionary of different outcomes.
print("\nPossible outcomes to print: {}".format(list(outcomes.keys())))
# We select the predictions.
print("Predictions for new inputs: {}".format(outcomes['predictions']))
Example #2
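# Note: this excerpt assumes that a variational circuit function (circuit), the
# number of training steps (steps), and the CircuitLearner class are defined or
# imported earlier in the script.
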
import numpy as np
import tensorflow as tf


# Regularizer: an l2 penalty on the circuit parameters marked for regularization.
def myregularizer(regularized_params):
    return tf.nn.l2_loss(regularized_params)


X_train = np.array([[0, 1], [0, 2], [0, 3], [0, 4]])

hyperparams = {
    'circuit': circuit,
    'task': 'unsupervised',
    'optimizer': 'SGD',
    'init_learning_rate': 0.1,
    'loss': myloss,
    'regularizer': myregularizer,
    'regularization_strength': 0.1
}

learner = CircuitLearner(hyperparams=hyperparams)

learner.train_circuit(X=X_train, steps=steps)

outcomes = learner.run_circuit()
final_distribution = outcomes['outputs']

# Use a helper function to sample Fock states from this distribution.
# The samples should be distributed similarly to the training data.
for i in range(10):
    sample = sample_from_distribution(distribution=final_distribution)
    print("Fock state sample {}:{} \n".format(i, sample))

# Print out the final circuit parameters
learner.get_circuit_parameters(only_print=True)
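
# The sampling helper used above (sample_from_distribution) is assumed to be
# defined or imported elsewhere. Purely as a hypothetical stand-in, a function
# that draws one Fock state from a probability distribution over Fock states
# could look like this:
import numpy as np

def sample_from_distribution(distribution):
    # Flatten the (possibly multi-mode) distribution and renormalize it.
    probs = np.asarray(distribution, dtype=float).ravel()
    probs = probs / probs.sum()
    # Draw one flat index and map it back to a tuple of Fock-state indices.
    idx = np.random.choice(len(probs), p=probs)
    return np.unravel_index(idx, np.shape(distribution))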