def test_meas_fitter_with_noise(self):
    """Test CompleteMeasFitter / MeasurementFilter against pre-generated noisy data.

    Loads pickled reference data (calibration matrices, raw bell-state
    results, and expected mitigated results), rebuilds the fitter and filter
    for each stored run, and checks the readout fidelity plus the mitigated
    '000' / '111' populations against the stored expectations.
    """
    print("Testing MeasurementFitter with noise")
    # Pre-generated results with noise, stored next to this test module.
    # NOTE: pickle.load is only acceptable because this fixture ships with
    # the repository (never unpickle untrusted data).
    pkl_path = os.path.join(os.path.dirname(__file__), 'test_meas_results.pkl')
    # Context manager guarantees the file is closed even if loading raises.
    with open(pkl_path, 'rb') as fo:
        tests = pickle.load(fo)

    # All 3-qubit computational-basis state labels.
    state_labels = ['000', '001', '010', '011', '100', '101', '110', '111']
    meas_cal = CompleteMeasFitter(None, state_labels, circlabel='test')

    for tst in tests:
        # Inject the stored calibration matrix and derive the fidelity from it.
        meas_cal.cal_matrix = tst['cal_matrix']
        fidelity = meas_cal.readout_fidelity()

        meas_filter = MeasurementFilter(tst['cal_matrix'], state_labels)

        # Mitigate the stored raw results with both supported methods.
        output_results_pseudo_inverse = meas_filter.apply(
            tst['results'], method='pseudo_inverse')
        output_results_least_square = meas_filter.apply(
            tst['results'], method='least_squares')

        # Compare with the expected fidelity and expected mitigated counts.
        self.assertAlmostEqual(fidelity, tst['fidelity'], places=0)
        for key in ('000', '111'):
            self.assertAlmostEqual(
                output_results_pseudo_inverse[key],
                tst['results_pseudo_inverse'][key], places=0)
            self.assertAlmostEqual(
                output_results_least_square[key],
                tst['results_least_square'][key], places=0)
class MeasurementErrorMitigation:
    """Run a measurement-error calibration experiment on a backend and apply
    the resulting measurement filter to lists of counts dictionaries."""

    def __init__(self, backend, qubits):
        self.backend = backend
        self.qubits = qubits
        # Populated by run_experiment() or load_matrix().
        self.filter = None

    def run_experiment(self, circuits_per_state=1):
        """Execute the calibration experiment, persist it, and build the filter.

        Returns the experiment id so the calibration can later be reloaded
        via ``load_matrix``.
        """
        experiment = MeasurementErrorExperiment(
            self.qubits, circuits_per_state=circuits_per_state)
        experiment_data = experiment.run(
            backend=self.backend,
            shots=self.backend.configuration().max_shots)
        print('Experiment ID:', experiment_data.experiment_id)
        experiment_data.block_for_results()
        experiment_data.save()
        self._load_from_exp_data(experiment_data)
        return experiment_data.experiment_id

    def load_matrix(self, experiment_id):
        """Rebuild the filter from a previously saved experiment."""
        service = self.backend.provider().service("experiment")
        self._load_from_exp_data(DbExperimentData.load(experiment_id, service))

    def _load_from_exp_data(self, exp_data):
        """Extract the first analysis result and wrap it in a MeasurementFilter."""
        first_result = exp_data.analysis_results()[0]
        self.filter = MeasurementFilter(first_result.value, first_result.extra)

    def apply(self, counts_list):
        """Apply the measurement filter to every counts dict in *counts_list*."""
        return [self.filter.apply(counts) for counts in counts_list]
def generate_meas_calibration(results_file_path: str, runs: int):
    """Run the measurement calibration circuits, compute the fitter matrix
    with several mitigation methods, and save the results to a JSON file.

    Each entry of the saved list is a dictionary with the keys:
        cal_matrix - the matrix used to mitigate the measurement error
        fidelity - the readout fidelity derived from that matrix
        results - results of a bell-state circuit with noise
        results_pseudo_inverse - mitigation via the pseudo-inverse method
        results_least_square - mitigation via the least-squares method

    Args:
        results_file_path: path of the JSON results file to write
        runs: the number of independent runs to record
    """
    all_runs = []
    for run_index in range(runs):
        # Fresh noisy calibration + bell-state execution, seeded per run.
        cal_results, state_labels, circuit_results = \
            meas_calibration_circ_execution(3, 1000, SEED + run_index)

        fitter = CompleteMeasFitter(cal_results, state_labels,
                                    circlabel='test')
        mitigation_filter = MeasurementFilter(fitter.cal_matrix, state_labels)

        # Mitigate the bell-state counts with both supported methods.
        pseudo_inverse_counts = mitigation_filter.apply(
            circuit_results, method='pseudo_inverse')
        least_squares_counts = mitigation_filter.apply(
            circuit_results, method='least_squares')

        all_runs.append({
            "cal_matrix": convert_ndarray_to_list_in_data(fitter.cal_matrix),
            "fidelity": fitter.readout_fidelity(),
            "results": circuit_results,
            "results_pseudo_inverse": pseudo_inverse_counts,
            "results_least_square": least_squares_counts,
        })

    with open(results_file_path, "w") as results_file:
        json.dump(all_runs, results_file)
def create_filters(interested_qubits,
                   QDT_correlated,
                   shots_per_point=1024,
                   seed=227,
                   show_denoised=False,
                   from_file=False,
                   file_address=''):
    """Return filters from our method, Qiskit method, QDT, and Standard Bayesian.

    Args:
        interested_qubits: an array of qubit indices (ints).
        QDT_correlated: True if qubits should be treated as correlated in the
            QDT method (one joint transition matrix instead of a Kronecker
            product of per-qubit matrices).
        shots_per_point: shots per data point used by our inference method.
        seed: seed for our method.
        show_denoised: forwarded to ``MeasFilter.inference``.
        from_file: if True, load our method's posterior from file instead of
            re-running inference.
        file_address: file address, string ending with '/' if not empty.

    Returns:
        Our filter, Qiskit method filter, QDT filter, and the filter from
        Standard Bayesian, in that order.

    Raises:
        Exception: if the Standard Bayesian posterior CSVs are missing
            (the R code must be run first to produce them).
    """
    # Read posterior samples produced by the R code for Standard Bayesian.
    print('Standard Bayesian filter')
    post_dict = {}
    try:
        for q in interested_qubits:
            post_dict['Qubit{}'.format(q)] = pd.read_csv(
                file_address + 'StandPostQubit{}.csv'.format(q)).to_numpy()
        SB_filter = MeasFilterSB(interested_qubits, post_dict)
    except FileNotFoundError as e:
        # Chain the original error so the missing file name is not lost.
        raise Exception(
            'Please run R code for Standard Bayesian Inference') from e

    print('Our Filter')
    mf = MeasFilter(interested_qubits, file_address=file_address)
    if from_file:
        mf.post_from_file()
    else:
        mf.inference(nPrior=40000,
                     seed=seed,
                     show_denoised=show_denoised,
                     shots_per_point=shots_per_point)

    # Qiskit method: calibration matrix + state labels read from CSV.
    print('Qiskit filter')
    cal_matrix = np.genfromtxt(file_address + 'cal_matrix.csv', delimiter=',')
    with open(file_address + 'state_labels.csv', mode='r') as sgm:
        reader = csv.reader(sgm)
        state_labels = np.asarray([row for row in reader][0])
    qiskit_filter = MeasurementFilter(cal_matrix, state_labels)

    # QDT matrix: either one correlated matrix, or the Kronecker product of
    # per-qubit transition matrices (ordering follows interested_qubits).
    print('QDT filter')
    if QDT_correlated:
        trans_matrix = np.genfromtxt(file_address + 'trans_matrix.csv',
                                     delimiter=',')
    else:
        trans_dict = {}
        for q in interested_qubits:
            trans_dict['Qubit{}'.format(q)] = np.genfromtxt(
                file_address + 'trans_matrix{}.csv'.format(q), delimiter=',')
        first = True
        for q in interested_qubits:
            if first:
                Mx = trans_dict['Qubit' + str(q)]
                first = False
            else:
                Mx = np.kron(Mx, trans_dict['Qubit' + str(q)])
        trans_matrix = Mx
    QDT_filter = MeasFilterQDT(trans_matrix)

    return mf, qiskit_filter, QDT_filter, SB_filter
def _load_from_exp_data(self, exp_data):
    """Build ``self.filter`` from the first analysis result in *exp_data*."""
    first_result = exp_data.analysis_results()[0]
    self.filter = MeasurementFilter(first_result.value, first_result.extra)