def setUp(self):
    """Seed the RNG and build the shared two-class fixtures and reference results."""
    super().setUp()
    # Seed both this test case and the aqua global RNG for reproducibility.
    aqua_globals.random_seed = self.random_seed = 1376
    self.training_data = {
        'A': np.asarray([[2.95309709, 2.51327412], [3.14159265, 4.08407045]]),
        'B': np.asarray([[4.08407045, 2.26194671], [4.46106157, 2.38761042]]),
    }
    self.testing_data = {
        'A': np.asarray([[3.83274304, 2.45044227]]),
        'B': np.asarray([[3.89557489, 0.31415927]]),
    }
    # Reference values the individual tests compare against.
    self.ref_opt_params = np.array([10.03814083, -12.22048954, -7.58026833, -2.42392954,
                                    12.91555293, 13.44064652, -2.89951454, -10.20639406,
                                    0.81414546, -1.00551752, -4.7988307, 14.00831419,
                                    8.26008064, -7.07543736, 11.43368677, -5.74857438])
    self.ref_train_loss = 0.69366523
    self.ref_prediction_a_probs = [[0.79882812, 0.20117188]]
    self.ref_prediction_a_label = [0]
    self.vqc_input = ClassificationInput(self.training_data, self.testing_data)
def test_vqc_with_raw_feature_vector_on_wine(self):
    """ vqc with raw features vector on wine test """
    feature_dim = 4  # dimension of each data point
    n_train = 8
    n_test = 4
    _, training_input, test_input, _ = _wine_data(training_size=n_train,
                                                  test_size=n_test,
                                                  n=feature_dim)
    # NOTE(review): reads `self.seed` — confirm the enclosing setUp defines it.
    params = {
        'problem': {'name': 'classification',
                    'random_seed': self.seed,
                    'skip_qobj_validation': True},
        'algorithm': {'name': 'VQC'},
        'backend': {'provider': 'qiskit.BasicAer', 'name': 'statevector_simulator'},
        'optimizer': {'name': 'COBYLA', 'maxiter': 100},
        'variational_form': {'name': 'RYRZ', 'depth': 3},
        'feature_map': {'name': 'RawFeatureVector', 'feature_dimension': feature_dim},
    }
    result = run_algorithm(params, ClassificationInput(training_input, test_input))
    self.log.debug(result['testing_accuracy'])
    self.assertGreater(result['testing_accuracy'], 0.8)
def test_qsvm_binary_via_run_algorithm(self):
    """Run QSVM on a small two-class dataset and pin accuracy and predictions."""
    train_samples = {
        'A': np.asarray([[0.6560706, 0.17605998],
                         [0.14154948, 0.06201424],
                         [0.80202323, 0.40582692],
                         [0.46779595, 0.39946754],
                         [0.57660199, 0.21821317]]),
        'B': np.asarray([[0.38857596, -0.33775802],
                         [0.49946978, -0.48727951],
                         [-0.30119743, -0.11221681],
                         [-0.16479252, -0.08640519],
                         [0.49156185, -0.3660534]]),
    }
    test_samples = {
        'A': np.asarray([[0.57483139, 0.47120732],
                         [0.48372348, 0.25438544],
                         [0.08791134, 0.11515506],
                         [0.45988094, 0.32854319],
                         [0.53015085, 0.41539212]]),
        'B': np.asarray([[-0.06048935, -0.48345293],
                         [-0.01065613, -0.33910828],
                         [-0.17323832, -0.49535592],
                         [0.14043268, -0.87869109],
                         [-0.15046837, -0.47340207]]),
    }
    # Datapoints to classify: all 'A' test points followed by all 'B' test points.
    datapoints = np.concatenate((test_samples['A'], test_samples['B']))
    params = {
        'problem': {'name': 'classification', 'random_seed': self.random_seed},
        'backend': {'shots': self.shots},
        'algorithm': {'name': 'QSVM'},
    }
    backend = BasicAer.get_backend('qasm_simulator')
    algo_input = ClassificationInput(train_samples, test_samples, datapoints)
    result = run_algorithm(params, algo_input, backend=backend)
    self.assertEqual(result['testing_accuracy'], 0.6)
    self.assertEqual(result['predicted_classes'],
                     ['A', 'A', 'A', 'A', 'A', 'A', 'B', 'A', 'A', 'A'])
def test_qsvm_multiclass_error_correcting_code(self):
    """Run three-class QSVM with the error-correcting-code extension."""
    backend = BasicAer.get_backend('qasm_simulator')
    train_samples = {
        'A': np.asarray([[0.6560706, 0.17605998],
                         [0.25776033, 0.47628296],
                         [0.8690704, 0.70847635]]),
        'B': np.asarray([[0.38857596, -0.33775802],
                         [0.49946978, -0.48727951],
                         [0.49156185, -0.3660534]]),
        'C': np.asarray([[-0.68088231, 0.46824423],
                         [-0.56167659, 0.65270294],
                         [-0.82139073, 0.29941512]]),
    }
    test_samples = {
        'A': np.asarray([[0.57483139, 0.47120732],
                         [0.48372348, 0.25438544],
                         [0.48142649, 0.15931707]]),
        'B': np.asarray([[-0.06048935, -0.48345293],
                         [-0.01065613, -0.33910828],
                         [0.06183066, -0.53376975]]),
        'C': np.asarray([[-0.74561108, 0.27047295],
                         [-0.69942965, 0.11885162],
                         [-0.66489165, 0.1181712]]),
    }
    # Datapoints to classify: test points stacked in A, B, C order.
    datapoints = np.concatenate((test_samples['A'], test_samples['B'], test_samples['C']))
    params = {
        'problem': {'name': 'classification', 'random_seed': self.random_seed},
        'algorithm': {'name': 'QSVM'},
        'backend': {'shots': self.shots},
        'multiclass_extension': {'name': 'ErrorCorrectingCode', 'code_size': 5},
        'feature_map': {'name': 'SecondOrderExpansion', 'depth': 2, 'entangler_map': [[0, 1]]},
    }
    algo_input = ClassificationInput(train_samples, test_samples, datapoints)
    result = run_algorithm(params, algo_input, backend=backend)
    self.assertAlmostEqual(result['testing_accuracy'], 0.444444444, places=4)
    self.assertEqual(result['predicted_classes'],
                     ['A', 'A', 'C', 'A', 'A', 'A', 'A', 'C', 'C'])
def test_vqc_on_wine(self):
    """ vqc on wine test """
    feature_dim = 4  # dimension of each data point
    n_train = 8
    n_test = 4
    seed = 10598
    np.random.seed(seed)
    _, training_input, test_input, _ = _wine_data(training_size=n_train,
                                                  test_size=n_test,
                                                  n=feature_dim)
    params = {
        'problem': {'name': 'classification',
                    'random_seed': self.random_seed,
                    'circuit_caching': True,
                    'skip_qobj_deepcopy': True,
                    'skip_qobj_validation': True,
                    'circuit_cache_file': None,
                    },
        'algorithm': {'name': 'VQC'},
        'backend': {'provider': 'qiskit.BasicAer', 'name': 'statevector_simulator'},
        'optimizer': {'name': 'COBYLA', 'maxiter': 200},
        'variational_form': {'name': 'RYRZ', 'depth': 3},
    }
    result = run_algorithm(params, ClassificationInput(training_input, test_input))
    self.log.debug(result['testing_accuracy'])
    # NOTE(review): asserts accuracy stays BELOW 0.6 — presumably pinning
    # known-poor behavior without a feature map; confirm intent.
    self.assertLess(result['testing_accuracy'], 0.6)
def setUp(self):
    """Seed numpy's RNG and build the tiny two-class SVM fixtures."""
    super().setUp()
    self.random_seed = 10598
    self.shots = 12000
    np.random.seed(self.random_seed)
    self.training_data = {
        'A': np.asarray([[2.95309709, 2.51327412], [3.14159265, 4.08407045]]),
        'B': np.asarray([[4.08407045, 2.26194671], [4.46106157, 2.38761042]]),
    }
    self.testing_data = {
        'A': np.asarray([[3.83274304, 2.45044227]]),
        'B': np.asarray([[3.89557489, 0.31415927]]),
    }
    self.svm_input = ClassificationInput(self.training_data, self.testing_data)
def test_vqc_with_raw_feature_vector_on_wine(self):
    """VQC with the RawFeatureVector feature map on the wine dataset (cached circuits)."""
    feature_dim = 4  # dimension of each data point
    n_train = 8
    n_test = 4
    seed = 10598
    np.random.seed(seed)
    _, training_input, test_input, _ = wine_data(training_size=n_train,
                                                 test_size=n_test,
                                                 n=feature_dim)
    # TODO: cache only work with optimization_level 0
    params = {
        'problem': {
            'name': 'classification',
            'random_seed': self.random_seed,
            'circuit_optimization_level': 0,
            'circuit_caching': True,
            'skip_qobj_deepcopy': True,
            'skip_qobj_validation': True,
            'circuit_cache_file': None,
        },
        'algorithm': {'name': 'VQC'},
        'backend': {'provider': 'qiskit.BasicAer', 'name': 'statevector_simulator'},
        'optimizer': {'name': 'COBYLA', 'maxiter': 200},
        'variational_form': {'name': 'RYRZ', 'depth': 3},
        'feature_map': {'name': 'RawFeatureVector', 'feature_dimension': feature_dim},
    }
    result = run_algorithm(params, ClassificationInput(training_input, test_input))
    self.log.debug(result['testing_accuracy'])
    self.assertGreater(result['testing_accuracy'], 0.8)
def test_vqc_statevector_via_run_algorithm(self):
    """ vqc statevector via run algorithm test """
    params = {
        'problem': {'name': 'classification',
                    'random_seed': 10598,
                    'skip_qobj_validation': True},
        'algorithm': {'name': 'VQC'},
        'backend': {'provider': 'qiskit.BasicAer', 'name': 'statevector_simulator'},
        'optimizer': {'name': 'COBYLA'},
        'variational_form': {'name': 'RYRZ', 'depth': 3},
        'feature_map': {'name': 'SecondOrderExpansion', 'depth': 2},
    }
    algo_input = ClassificationInput(self.training_data, self.testing_data)
    result = run_algorithm(params, algo_input)
    # Pin the training loss to 4 decimals and the (small-fixture) accuracy exactly.
    expected_train_loss = 0.1059404
    np.testing.assert_array_almost_equal(result['training_loss'],
                                         expected_train_loss, decimal=4)
    self.assertEqual(result['testing_accuracy'], 0.5)
def test_vqc_with_max_evals_grouped(self):
    """ vqc with max evals grouped test """
    # NOTE(review): reads `self.seed` — confirm the enclosing setUp defines it.
    params = {
        'problem': {'name': 'classification', 'random_seed': self.seed},
        'algorithm': {'name': 'VQC', 'max_evals_grouped': 2},
        'backend': {'provider': 'qiskit.BasicAer', 'name': 'qasm_simulator', 'shots': 1024},
        'optimizer': {'name': 'SPSA', 'max_trials': 10, 'save_steps': 1},
        'variational_form': {'name': 'RYRZ', 'depth': 3},
        'feature_map': {'name': 'SecondOrderExpansion', 'depth': 2},
    }
    algo_input = ClassificationInput(self.training_data, self.testing_data)
    result = run_algorithm(params, algo_input)
    # Compare against the references stored on the test case by setUp.
    np.testing.assert_array_almost_equal(result['opt_params'],
                                         self.ref_opt_params, decimal=8)
    np.testing.assert_array_almost_equal(result['training_loss'],
                                         self.ref_train_loss, decimal=8)
    self.assertEqual(1.0, result['testing_accuracy'])
def test_vqc_with_raw_feature_vector_on_wine(self):
    """VQC with the RawFeatureVector feature map on a larger wine split."""
    feature_dim = 4  # dimension of each data point
    n_train = 20
    n_test = 10
    seed = 10598
    np.random.seed(seed)
    _, training_input, test_input, _ = wine_data(training_size=n_train,
                                                 test_size=n_test,
                                                 n=feature_dim)
    params = {
        'problem': {'name': 'classification', 'random_seed': self.random_seed},
        'algorithm': {'name': 'VQC'},
        'backend': {'provider': 'qiskit.BasicAer', 'name': 'statevector_simulator'},
        'optimizer': {'name': 'COBYLA', 'maxiter': 200},
        'variational_form': {'name': 'RYRZ', 'depth': 3},
        'feature_map': {'name': 'RawFeatureVector', 'feature_dimension': feature_dim},
    }
    result = run_algorithm(params, ClassificationInput(training_input, test_input))
    self.log.debug(result['testing_accuracy'])
    self.assertGreater(result['testing_accuracy'], 0.85)
def setUp(self):
    """Silence the config deprecation warning and build the shared SVM fixtures."""
    super().setUp()
    warnings.filterwarnings("ignore",
                            message=aqua_globals.CONFIG_DEPRECATION_MSG,
                            category=DeprecationWarning)
    self.random_seed = 10598
    self.shots = 12000
    # Seed the aqua global RNG for reproducible runs.
    aqua_globals.random_seed = self.random_seed
    self.training_data = {
        'A': np.asarray([[2.95309709, 2.51327412], [3.14159265, 4.08407045]]),
        'B': np.asarray([[4.08407045, 2.26194671], [4.46106157, 2.38761042]]),
    }
    self.testing_data = {
        'A': np.asarray([[3.83274304, 2.45044227]]),
        'B': np.asarray([[3.89557489, 0.31415927]]),
    }
    self.svm_input = ClassificationInput(self.training_data, self.testing_data)
def __init__(self):
    """Load the logged game data and train a classical multiclass SVM on it."""
    self.move = 0
    self.data_file = 'data.csv'
    self.data_path = 'PlayerLogic'
    self.feature_dim = 9  # dimension of each data point
    # Load the user-defined train/test split from the logged CSV data.
    _, training_input, test_input, _ = VQCQPlayer.userDefinedData(
        self.data_path, self.data_file,
        ['0', '1', '2', '3', '4', '5', '6', '7', '8'],
        training_size=6000, test_size=500,
        n=self.feature_dim, PLOT_DATA=False)
    # Stack every class's test points into one array of datapoints to classify.
    datapoints = np.concatenate([test_input[label] for label in test_input])
    aqua_dict = {
        'problem': {'name': 'classification'},
        'algorithm': {'name': 'SVM'},
        'multiclass_extension': {'name': 'AllPairs'},
    }
    algo_input = ClassificationInput(training_input, test_input, datapoints)
    from qiskit.aqua import QiskitAqua
    aqua_obj = QiskitAqua(aqua_dict, algo_input)
    self.algo_obj = aqua_obj.quantum_algorithm
    logger.info("Training the SVM....")
    aqua_obj.run()
    logger.info("Trained!")
def runSvm(self):
    """Run QSVM on the breast-cancer split and return the result dict."""
    _, training_input, test_input, _ = self.Breast_cancer()
    # Stack every class's test points into one array of datapoints to classify.
    datapoints = np.concatenate([test_input[label] for label in test_input])
    aqua_dict = {
        'problem': {'name': 'classification', 'random_seed': 10598},
        'algorithm': {'name': 'QSVM'},
        'backend': {'provider': 'qiskit.BasicAer',
                    'name': 'qasm_simulator',
                    'shots': self.shots},
        'feature_map': {'name': 'SecondOrderExpansion',
                        'depth': 2,
                        'entanglement': 'linear'},
    }
    # Give the user a visible heads-up (five 1-second beats) before the long run.
    for _ in range(5):
        time.sleep(1)
        print(
            "Ignore Deprecation Warning!!! It can take few minutes. Please wait..."
        )
    algo_input = ClassificationInput(training_input, test_input, datapoints)
    return run_algorithm(aqua_dict, algo_input)
}, 'backend': { 'shots': 1024 }, 'multiclass_extension': { 'name': 'OneAgainstRest' }, 'feature_map': { 'name': 'SecondOrderExpansion', 'depth': 2 } } training_dataset = { 'A': train_x[train_y == 0], 'B': train_x[train_y == 1], 'C': train_x[train_y == 2] } test_dataset = { 'A': test_x[test_y == 0], 'B': test_x[test_y == 1], 'C': test_x[test_y == 2] } total_arr = np.concatenate( (test_dataset['A'], test_dataset['B'], test_dataset['C'])) alg_input = ClassificationInput(training_dataset, test_dataset, total_arr) result = run_algorithm(params, algo_input=alg_input, backend=backend) result['test_success_ratio']
# Build the breast-cancer train/test split (plotting it) and run QSVM on it.
sample_Total, training_input, test_input, class_labels = Breast_cancer(
    training_size=40, test_size=10, n=n, PLOT_DATA=True)
# Stack every class's test points into one array of datapoints to classify.
temp = [test_input[k] for k in test_input]
total_array = np.concatenate(temp)
aqua_dict = {
    'problem': {
        'name': 'classification',
        'random_seed': 100
    },
    'algorithm': {
        'name': 'QSVM'
    },
    'backend': {
        'provider': 'qiskit.BasicAer',
        'name': 'qasm_simulator',
        'shots': 256
    },
    'feature_map': {
        'name': 'SecondOrderExpansion',
        'depth': 2,
        'entanglement': 'linear'
    }
}
algo_input = ClassificationInput(training_input, test_input, total_array)
result = run_algorithm(aqua_dict, algo_input)
# Dump every entry of the result dictionary.
for k, v in result.items():
    print("'{}' : {}".format(k, v))
def test_classical_multiclass_error_correcting_code(self):
    """ classical multiclass error correcting code test """
    # Hard-coded three-class 2-D dataset: 20 training points per class.
    training_input = {'A': np.asarray([[0.6560706, 0.17605998], [0.25776033, 0.47628296],
                                       [0.79687342, 0.26933706], [0.39016555, -0.08469916],
                                       [0.3994399, 0.13601573], [0.26752049, -0.03978988],
                                       [0.24026485, 0.01953518], [0.49490503, 0.17239737],
                                       [0.70171827, 0.5323737], [0.43221576, 0.42357294],
                                       [0.62864856, 0.45504447], [0.6259567, 0.30917324],
                                       [0.58272403, 0.20760754], [0.3938784, 0.17184466],
                                       [0.14154948, 0.06201424], [0.80202323, 0.40582692],
                                       [0.46779595, 0.39946754], [0.57660199, 0.21821317],
                                       [0.51044761, 0.03699459], [0.8690704, 0.70847635]]),
                      'B': np.asarray([[0.38857596, -0.33775802], [0.49946978, -0.48727951],
                                       [-0.30119743, -0.11221681], [-0.16479252, -0.08640519],
                                       [-0.21808884, -0.56508327], [-0.14683258, -0.46528508],
                                       [-0.05888195, -0.51474852], [0.20517435, -0.66839091],
                                       [0.25475584, -0.21239966], [0.55194854, 0.02789679],
                                       [-0.11542951, -0.54157026], [0.44625538, -0.49485869],
                                       [-0.14609118, -0.60719757], [0.18121305, -0.1922198],
                                       [0.19283785, -0.31798925], [0.29626405, -0.54563098],
                                       [-0.39044304, -0.36527253], [-0.29432215, -0.43924164],
                                       [-0.40294517, -0.31381308], [0.49156185, -0.3660534]]),
                      'C': np.asarray([[-0.68088231, 0.46824423], [-0.56167659, 0.65270294],
                                       [-0.54323753, 0.67630888], [-0.57685569, -0.08515631],
                                       [-0.67765364, 0.19654347], [-0.62129115, 0.22223066],
                                       [-0.78040851, 0.65247848], [-0.50730279, 0.59898039],
                                       [-0.64275805, 0.63381998], [-0.72854201, 0.14151325],
                                       [-0.57004437, 0.12344874], [-0.55215973, 0.74331215],
                                       [-0.60916047, 0.52006917], [-0.23093745, 1.],
                                       [-0.84025337, 0.5564536], [-0.66952391, 0.57918859],
                                       [-0.67725082, 0.60439934], [-1., 0.23715261],
                                       [-0.62933025, 0.19055405], [-0.82139073, 0.29941512]])}
    # 10 test points per class.
    test_input = {'A': np.asarray([[0.57483139, 0.47120732], [0.48372348, 0.25438544],
                                   [0.08791134, 0.11515506], [0.45988094, 0.32854319],
                                   [0.53015085, 0.41539212], [0.5073321, 0.47346751],
                                   [0.71081819, 0.19202569], [1., 0.51698289],
                                   [0.630973, 0.19898666], [0.48142649, 0.15931707]]),
                  'B': np.asarray([[-0.06048935, -0.48345293], [-0.01065613, -0.33910828],
                                   [-0.17323832, -0.49535592], [0.14043268, -0.87869109],
                                   [-0.15046837, -0.47340207], [-0.39600934, -0.21647957],
                                   [-0.394202, -0.44705385], [0.15243621, -0.36695163],
                                   [0.06195634, -0.23262325], [0.06183066, -0.53376975]]),
                  'C': np.asarray([[-0.74561108, 0.27047295], [-0.69942965, 0.11885162],
                                   [-0.52649891, 0.35265538], [-0.54345106, 0.13113995],
                                   [-0.57181448, 0.13594725], [-0.33713329, 0.05095243],
                                   [-0.65741384, 0.477976], [-0.79986067, 0.41733195],
                                   [-0.73856328, 0.80699537], [-0.66489165, 0.1181712]])}
    # Datapoints to classify, stacked in sorted label order: A, then B, then C.
    temp = [test_input[k] for k in sorted(test_input)]
    total_array = np.concatenate(temp)
    params = {
        'problem': {'name': 'classification'},
        'algorithm': {
            'name': 'SVM',
        },
        'multiclass_extension': {'name': 'ErrorCorrectingCode', 'code_size': 5},
    }
    algo_input = ClassificationInput(training_input, test_input, total_array)
    result = run_algorithm(params, algo_input)
    # All 30 test points are expected to be classified correctly.
    self.assertEqual(result['testing_accuracy'], 1.0)
    self.assertEqual(result['predicted_classes'], ['A', 'A', 'A', 'A', 'A', 'A', 'A', 'A',
                                                   'A', 'A', 'B', 'B', 'B', 'B', 'B', 'B',
                                                   'B', 'B', 'B', 'B', 'C', 'C', 'C', 'C',
                                                   'C', 'C', 'C', 'C', 'C', 'C'])
'feature_map': {'name': 'SecondOrderExpansion', 'depth': int(args.featMapDepth)} } if args.steerCobylaOptim: params = { 'problem': {'name': 'classification', 'random_seed': 420 }, 'algorithm': {'name': 'VQC', 'override_SPSA_params': True}, 'backend': {'shots': int(args.numberShots)}, 'optimizer': {'name': 'SLSQP','maxiter' : 10 , 'disp': True}, 'variational_form': {'name': 'RYRZ', 'depth': int(args.varFormDepth)}, 'feature_map': {'name': 'SecondOrderExpansion', 'depth': int(args.featMapDepth)} } print("Creating Classification input") classification_input = ClassificationInput(trainDict, testDict, x_test) print("Getting Backend") backend = BasicAer.get_backend('qasm_simulator') result=None if not args.steerTestRun: print("Running Algorithm") result = run_algorithm(params, classification_input, backend=backend) print("testing success ratio: ", result['testing_accuracy']) print("predicted classes:", result['predicted_classes']) #time or tag setting in name outtag="_".join([str(vars(args)[i]) if not "steer" in str(i) else "" for i in vars(args)]) outtag+="_%s"%(int(time.time())) pklFile=open("{0}/qicl_test_{1}.pkl".format(args.steerOutDir,outtag),'wb') pickle.dump( result , pklFile) pickle.dump( vars(args) , pklFile)
def test_classical_binary(self):
    """ classical binary test """
    # Hard-coded two-class 2-D dataset: 20 training points per class.
    training_input = {'A': np.asarray([[0.6560706, 0.17605998], [0.25776033, 0.47628296],
                                       [0.79687342, 0.26933706], [0.39016555, -0.08469916],
                                       [0.3994399, 0.13601573], [0.26752049, -0.03978988],
                                       [0.24026485, 0.01953518], [0.49490503, 0.17239737],
                                       [0.70171827, 0.5323737], [0.43221576, 0.42357294],
                                       [0.62864856, 0.45504447], [0.6259567, 0.30917324],
                                       [0.58272403, 0.20760754], [0.3938784, 0.17184466],
                                       [0.14154948, 0.06201424], [0.80202323, 0.40582692],
                                       [0.46779595, 0.39946754], [0.57660199, 0.21821317],
                                       [0.51044761, 0.03699459], [0.8690704, 0.70847635]]),
                      'B': np.asarray([[0.38857596, -0.33775802], [0.49946978, -0.48727951],
                                       [-0.30119743, -0.11221681], [-0.16479252, -0.08640519],
                                       [-0.21808884, -0.56508327], [-0.14683258, -0.46528508],
                                       [-0.05888195, -0.51474852], [0.20517435, -0.66839091],
                                       [0.25475584, -0.21239966], [0.55194854, 0.02789679],
                                       [-0.11542951, -0.54157026], [0.44625538, -0.49485869],
                                       [-0.14609118, -0.60719757], [0.18121305, -0.1922198],
                                       [0.19283785, -0.31798925], [0.29626405, -0.54563098],
                                       [-0.39044304, -0.36527253], [-0.29432215, -0.43924164],
                                       [-0.40294517, -0.31381308], [0.49156185, -0.3660534]])}
    # 10 test points per class.
    test_input = {'A': np.asarray([[0.57483139, 0.47120732], [0.48372348, 0.25438544],
                                   [0.08791134, 0.11515506], [0.45988094, 0.32854319],
                                   [0.53015085, 0.41539212], [0.5073321, 0.47346751],
                                   [0.71081819, 0.19202569], [1., 0.51698289],
                                   [0.630973, 0.19898666], [0.48142649, 0.15931707]]),
                  'B': np.asarray([[-0.06048935, -0.48345293], [-0.01065613, -0.33910828],
                                   [-0.17323832, -0.49535592], [0.14043268, -0.87869109],
                                   [-0.15046837, -0.47340207], [-0.39600934, -0.21647957],
                                   [-0.394202, -0.44705385], [0.15243621, -0.36695163],
                                   [0.06195634, -0.23262325], [0.06183066, -0.53376975]])}
    # Datapoints to classify, stacked in sorted label order: A, then B.
    temp = [test_input[k] for k in sorted(test_input)]
    total_array = np.concatenate(temp)
    params = {
        'problem': {'name': 'classification'},
        'algorithm': {
            'name': 'SVM',
        }
    }
    algo_input = ClassificationInput(training_input, test_input,
                                     total_array)
    result = run_algorithm(params, algo_input)
    # All 20 test points are expected to be classified correctly.
    self.assertEqual(result['testing_accuracy'], 1.0)
    self.assertEqual(result['predicted_classes'], ['A', 'A', 'A', 'A', 'A', 'A', 'A', 'A',
                                                   'A', 'A', 'B', 'B', 'B', 'B', 'B', 'B',
                                                   'B', 'B', 'B', 'B'])