def get_counts_from_jobs(jobs_list,
                         return_job_headers=False) -> Dict[str, Dict[str, int]]:
    """Extract counts from a list of finished jobs, keyed by circuit name."""
    anf.cool_print('Getting counts...')
    counts_dictionary, job_headers = {}, []
    for job_index in tqdm(range(len(jobs_list))):
        job_now = jobs_list[job_index]
        # Call result() once per job and reuse it for both the header and counts.
        results_object_now = job_now.result()
        job_headers.append(results_object_now.qobj_id)
        results_list_now = list(results_object_now.results)
        for exp_index in range(len(results_list_now)):
            circuit_name_now = results_list_now[exp_index].header.name
            counts_now = results_object_now.get_counts(circuit_name_now)
            counts_dictionary[circuit_name_now] = counts_now
    if return_job_headers:
        return counts_dictionary, job_headers
    return counts_dictionary
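# Example (illustrative sketch; assumes `jobs` is a list of finished qiskit
# jobs, e.g. as returned by run_batches or download_multiple_jobs below):
#
#     counts = get_counts_from_jobs(jobs)
#     # counts maps circuit names to {bitstring: number_of_shots} dictionaries
#     counts, headers = get_counts_from_jobs(jobs, return_job_headers=True)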
def get_noise_matrix_dependent(self, qubits_of_interest: List[int],
                               neighbors_of_interest: List[int]) -> dict:
    """
    Like self._compute_noise_matrix_dependent, but first checks whether the
    matrices were already calculated, to prevent recomputing the same matrices.

    :param qubits_of_interest: labels of qubits in the marginal we are interested in
    :param neighbors_of_interest: labels of qubits that affect the noise matrix
                                  on qubits_of_interest
    :return: conditional_noise_matrices_dictionary
    """
    cluster_key = self.get_qubits_key(qubits_of_interest)
    if cluster_key not in self._noise_matrices_dictionary.keys():
        self.compute_subset_noise_matrices_averaged([qubits_of_interest])

    # NOTE: the None check must come first, since len(None) raises TypeError.
    if neighbors_of_interest is None or len(neighbors_of_interest) == 0:
        neighbors_key = 'averaged'
        if neighbors_key in self._noise_matrices_dictionary[cluster_key]:
            if not anf.is_stochastic(
                    self._noise_matrices_dictionary[cluster_key]['averaged']):
                # TODO FBM: SOMETHING IS BROKEN - the stored averaged matrix is
                #  sometimes not stochastic; recompute it as a workaround.
                anf.cool_print('Bug is here')
                print(cluster_key, neighbors_key)
                self._noise_matrices_dictionary[cluster_key]['averaged'] = \
                    self._compute_noise_matrix_averaged(qubits_of_interest)
                if not anf.is_stochastic(
                        self._noise_matrices_dictionary[cluster_key]['averaged']):
                    anf.cool_print("And I can't fix it")
            return {'averaged':
                        self._noise_matrices_dictionary[cluster_key]['averaged']}
        else:
            return self._compute_noise_matrix_dependent(qubits_of_interest,
                                                        neighbors_of_interest)
    else:
        neighbors_key = 'q' + 'q'.join([str(s) for s in neighbors_of_interest])
        if neighbors_key in self._noise_matrices_dictionary[cluster_key]:
            return self._noise_matrices_dictionary[cluster_key][neighbors_key]
        else:
            return self._compute_noise_matrix_dependent(qubits_of_interest,
                                                        neighbors_of_interest)
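# Note on key conventions (inferred from the code above and its usage elsewhere
# in this module): get_qubits_key is expected to produce keys like 'q0q1' for
# qubits [0, 1], and neighbors_key follows the same pattern, e.g.
# neighbors_of_interest = [2, 5] yields 'q2q5'; an empty or None neighborhood
# falls back to the 'averaged' matrix.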
def get_absent_symbols_amount(self):
    """Return the number of subset symbols still absent from the collection."""
    t0 = time.time()
    zero_subsets = 0
    for subset in self._subsets:
        subset_counts = self._dictionary_symbols_counting[
            self.get_qubits_key(subset)]
        zero_subsets += len(subset_counts) - np.count_nonzero(subset_counts)
    self._circuits_properties_dictionary['absent_elements_amount'] = zero_subsets
    anf.cool_print('This took:', time.time() - t0)
    return zero_subsets
def compute_perfect_collection(self, method_name='bruteforce_randomized',
                               method_kwargs=None):
    """
    Find a perfect collection of overlapping circuits.

    "Perfect" means that for each subset of qubits self._subsets_list[i], each of
    the self._number_of_symbols ** len(self._subsets_list[i]) possible symbols
    appears in the collection at least once.

    :param method_name: possible values:
        1. 'bruteforce' - see self._compute_perfect_collection_bruteforce
        2. 'bruteforce_randomized' - see
           self._compute_perfect_collection_bruteforce_randomized
    :param method_kwargs: kwargs for the chosen method; see the corresponding
        methods' descriptions
    """
    if method_name == 'bruteforce':
        if method_kwargs is None:
            method_kwargs = {'circuits_in_batch': 1,
                             'print_updates': True}
        self._compute_perfect_collection_bruteforce(**method_kwargs)
    elif method_name == 'bruteforce_randomized':
        if method_kwargs is None:
            method_kwargs = {'number_of_iterations': 100,
                             'circuits_in_batch': 1,
                             'print_updates': True,
                             'optimized_quantity': 'minimal_amount',
                             'additional_circuits': 0}
        self._compute_perfect_collection_bruteforce_randomized(**method_kwargs)

    absent_elements = self._circuits_properties_dictionary['absent_elements_amount']
    if absent_elements != 0:
        anf.cool_print('________WARNING________:',
                       'The collection is not perfect. It is missing %s elements!'
                       '\nTry increasing the limit on the circuits amount.'
                       % absent_elements,
                       'red')
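# Example (illustrative sketch; `collection` stands for an instance of the
# owning class, constructed elsewhere). Called with no arguments, this runs the
# randomized bruteforce search with the default kwargs filled in above:
#
#     collection.compute_perfect_collection()
#     circuits = collection.circuits_list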
def download_multiple_jobs(backend_name, job_IDs_list):
    """Retrieve previously submitted jobs from an IBMQ backend by their job IDs."""
    IBMQ.load_account()
    if backend_name in ['qasm_simulator', 'statevector_simulator',
                        'unitary_simulator']:
        raise ValueError('Local simulators do not store jobs online.')

    provider = IBMQ.get_provider(group='open')
    backend = provider.get_backend(backend_name)

    all_jobs = []
    for job_ID in job_IDs_list:
        anf.cool_print('Getting job with ID:', job_ID)
        job = backend.retrieve_job(job_ID)
        anf.cool_print('Got it!')
        all_jobs.append(job)
    return all_jobs
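# Example (illustrative sketch; the backend name and job IDs are placeholders):
#
#     jobs = download_multiple_jobs('ibmq_lima',
#                                   ['<job_id_1>', '<job_id_2>'])
#     counts = get_counts_from_jobs(jobs)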
def clusters_list(self, clusters_list: List[List[int]]) -> None:
    """Set the clusters list, precomputing noise matrices for any new clusters."""
    for cluster in clusters_list:
        cluster_string = self.get_qubits_key(cluster)
        if cluster_string not in self._noise_matrices_dictionary.keys():
            average_noise_matrix_now = self._compute_noise_matrix_averaged(cluster)
            dictionary_now = {'averaged': average_noise_matrix_now}
            if cluster_string in self._neighborhoods.keys():
                neighborhood_now = self._neighborhoods[cluster_string]
                dependent_noise_matrices = self._compute_noise_matrix_dependent(
                    cluster, neighborhood_now)
                dictionary_now = {**dictionary_now,
                                  **dependent_noise_matrices}
            self._noise_matrices_dictionary[cluster_string] = dictionary_now
    self._clusters_list = clusters_list
def __init__(self,
             experimental_results_dictionary: Dict[str, Dict[str, int]],
             bitstrings_right_to_left: bool,
             correction_data_dictionary: dict,
             marginals_dictionary: Optional[Dict[str, Dict[str, np.ndarray]]] = None
             ) -> None:
    """
    :param experimental_results_dictionary: dictionary of results of experiments
        we wish to correct (see class description for conventions)
    :param bitstrings_right_to_left: whether bitstrings should be read from right
        to left (when interpreting qubit labels)
    :param correction_data_dictionary: dictionary containing the information
        needed for noise mitigation on marginal probability distributions
    :param marginals_dictionary: optional pre-computed marginal distributions
        (see class description for conventions)
    """
    super().__init__(experimental_results_dictionary,
                     bitstrings_right_to_left,
                     marginals_dictionary)

    self._noise_matrices = correction_data_dictionary['noise_matrices']
    if 'correction_matrices' in correction_data_dictionary.keys():
        self._correction_matrices = correction_data_dictionary['correction_matrices']
    else:
        anf.cool_print('No correction matrices provided!', '', 'red')
    self._correction_indices = correction_data_dictionary['correction_indices']
    self._corrected_marginals = {}
def _compute_perfect_collection_bruteforce(self, circuits_in_batch: int,
                                           print_updates: bool):
    """Add random batches of circuits until every subset symbol appears at least
    once, or the maximal circuits amount is reached."""
    runs_number = 1
    absent_elements_amount = self._elements_in_perfect_collection
    while absent_elements_amount > 0 and runs_number < self._maximal_circuits_amount:
        if runs_number % 20 == 0 and print_updates:
            anf.cool_print('Run number:', runs_number)
            anf.cool_print('Number of circuits:', len(self._circuits_list))
            anf.cool_print('Absent elements amount:', absent_elements_amount)
        circuits_now = self.get_random_circuits(circuits_in_batch)
        self.add_circuits(circuits_now)
        added_elements = self.update_dictionary_subsets_symbols_counting(
            circuits_list=circuits_now, count_added_subcircuits=True)
        absent_elements_amount -= added_elements
        runs_number += 1
                                      'amount': 10 ** (-9)},
                 'additional_circuits': additional_circuits}

# Compute a perfect collection of DDOT circuits, meaning that we want circuits
# that implement every computational-basis state on each k-qubit subset at
# least once.
base_OT.compute_perfect_collection(method_name=method_name,
                                   method_kwargs=method_kwargs)

# Get the list of circuits.
DDOT_circuits = base_OT.circuits_list

# Calculate various properties of the circuits (and print them).
base_OT.calculate_properties_of_circuits()
circuits_properties = base_OT.circuits_properties_dictionary
anf.cool_print("Properties of the family:\n", circuits_properties)

if saving and anf.query_yes_no('Do you still wish to save?'):
    dictionary_to_save = {'circuits_list': DDOT_circuits,
                          'circuits_properties': circuits_properties}

    directory = anf.get_module_directory() + '/saved_data/data_circuits_collections/DDOT/' + \
                'locality_%s' % subsets_locality + '/number_of_qubits_%s' % number_of_qubits + '/'

    file_name = 'circuits_amount%s' % len(DDOT_circuits) + '_' + anf.gate_proper_date_string()

    directory_to_save = anf.save_results_pickle(dictionary_to_save=dictionary_to_save,
                                                directory=directory,
                                                custom_name=file_name)
def compute_clusters(self, maximal_size: int,
                     method: Optional[str] = 'holistic_v1',
                     method_kwargs: Optional[dict] = None) -> list:
    """
    Get a partition of the device's qubits into disjoint "clusters".

    This function uses various heuristic methods, specified via the string
    "method", and the table of correlations from the class property
    self._correlations_table_pairs.

    :param maximal_size: maximal allowed size of a cluster
    :param method: string specifying the clustering heuristic.
        Possible values:
        'pairwise' - heuristic that uses Algorithm 3 from Ref. []
        'holistic_v1' - heuristic that uses the function
                        partition_algorithm_v1_cummulative
    :param method_kwargs: arguments passed to the clustering function.
        For possible parameters, see the descriptions of the particular functions.
    :return: clusters_labels_list: list of lists, each representing a single cluster
    """
    self._clusters_list = []

    if method == 'pairwise':
        if method_kwargs is None:
            method_kwargs = {'maximal_size': maximal_size,
                             'cluster_threshold': 0.02}
        elif 'maximal_size' in method_kwargs.keys():
            if method_kwargs['maximal_size'] != maximal_size:
                raise ValueError('Disagreement between maximal size argument '
                                 'and method kwargs')
        else:
            method_kwargs['maximal_size'] = maximal_size
        clusters_list = self._compute_clusters_pairwise(**method_kwargs)
    elif method == 'holistic_v1':
        if method_kwargs is None:
            method_kwargs = {'alpha': 1,
                             'N_alg': 1000,
                             'printing': False,
                             'drawing': False}
        elif 'C_maxsize' in method_kwargs.keys():
            # TODO FBM, OS: this variable should have a name consistent
            #  with the rest of the functions
            if method_kwargs['C_maxsize'] != maximal_size:
                raise ValueError('Disagreement between maximal size argument '
                                 'and method kwargs')
        else:
            method_kwargs['C_maxsize'] = maximal_size
        clusters_list, score = partition_algorithm_v1_cummulative(
            self._correlations_table_pairs, **method_kwargs)
        anf.cool_print('Current partitioning got score:', score)
    else:
        raise ValueError('No heuristic with that name: ' + method)

    self._clusters_list = clusters_list
    return clusters_list
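# Example (illustrative sketch; `analyzer` stands for an instance of the owning
# class with self._correlations_table_pairs already computed):
#
#     clusters = analyzer.compute_clusters(maximal_size=3,
#                                          method='holistic_v1',
#                                          method_kwargs={'alpha': 1,
#                                                         'N_alg': 1000,
#                                                         'printing': False,
#                                                         'drawing': False})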
SDK_name = 'qiskit'

# Define the number of qubits you wish to create DDOT circuits for.
number_of_qubits = 5
qubit_indices = list(range(number_of_qubits))

# Locality of subsets we wish to investigate. For example, k=2 will implement all
# computational-basis states (00, 01, 10, 11) on ALL qubit pairs.
subsets_locality = 2

directory = anf.get_module_directory() + '/saved_data/data_circuits_collections/DDOT/' + \
            'locality_%s' % subsets_locality + '/number_of_qubits_%s' % number_of_qubits + '/'

files = sorted(os.listdir(directory))
anf.cool_print('Available files:\n', files)
anf.cool_print('Choosing file:\n', files[-1])

with open(directory + files[-1], 'rb') as filein:
    dictionary_data = pickle.load(filein)

circuits_labels = dictionary_data['circuits_list']
circuits_amount = len(circuits_labels)

circuits_creator = CircuitsCreatorDDOT(SDK_name=SDK_name,
                                       qubit_indices=qubit_indices,
                                       circuits_labels=circuits_labels,
                                       number_of_repetitions=1,
                                       add_barriers=True)
def _compute_perfect_collection_bruteforce_randomized(
        self,
        number_of_iterations: int,
        circuits_in_batch: int,
        print_updates: bool,
        optimized_quantity: Union[str, Dict[str, float]],
        additional_circuits: Optional[int] = 0):
    """
    Run self._compute_perfect_collection_bruteforce number_of_iterations times;
    in each iteration add additional_circuits random circuits, compute the cost
    function, and keep the family that minimizes it.

    :param number_of_iterations: how many random perfect families to generate
    :param circuits_in_batch: see self._compute_perfect_collection_bruteforce
    :param print_updates: whether to print updates during optimization
    :param optimized_quantity: specifies the cost function.
        Possible string values:
        1. 'minimal_amount' - maximizes the number of least-frequent
           subset-circuits
        2. 'spread' - minimizes the difference between the maximal and minimal
           numbers of subset-circuits
        3. 'circuits_amount' - minimizes the number of circuits
           (NOTE: it does not make sense to choose this option with
           additional_circuits > 0)
        4. 'SD' - minimizes the standard deviation of occurrences of
           subset-circuits

        It is possible to use combined cost functions. In that case, a dictionary
        must be provided where each KEY is the label of an optimized quantity and
        the VALUE is its weight. For example:
        optimized_quantity = {'minimal_amount': 1.0, 'spread': 0.5}
        gives a cost function which returns
        1.0 * (-number of least-frequent circuits)
        + 0.5 * (difference between most and least frequent circuits).
    :param additional_circuits: number of circuits added to the PERFECT collection
        obtained in the optimization loop. They are "additional" in the sense that
        they are not needed for the collection to be perfect, but are used to
        obtain better values of the cost function, or simply to add more
        experiments to reduce statistical noise.
    """
    if isinstance(optimized_quantity, str):
        cost_function = self._get_proper_cost_function(
            optimized_quantity=optimized_quantity)
    elif isinstance(optimized_quantity, dict):
        cost_function = self._add_cost_functions(
            dictionary_cost_functions=optimized_quantity)
    else:
        raise ValueError('optimized_quantity must be a string or a dictionary.')

    runs_range = range(number_of_iterations)
    if self._show_progress_bars:
        runs_range = tqdm(runs_range)

    global_cost, best_family = 10 ** 6, None
    for runs_number in runs_range:
        if runs_number % int(np.ceil(number_of_iterations / 20)) == 0 \
                and print_updates:
            anf.cool_print('Run number:', runs_number, 'red')
            anf.cool_print('Current best value of cost function:', global_cost)

        self.reset_everything()
        self._compute_perfect_collection_bruteforce(
            circuits_in_batch=circuits_in_batch, print_updates=False)

        if additional_circuits > 0:
            current_length = len(self._circuits_list)
            maximal_length = self._maximal_circuits_amount
            if current_length < maximal_length:
                # Do not exceed the limit on the total number of circuits.
                adding_circuits = min(additional_circuits,
                                      maximal_length - current_length)
                new_circuits = self.get_random_circuits(adding_circuits)
                self.update_dictionary_subsets_symbols_counting(new_circuits)
                self.add_circuits(new_circuits)

        self.calculate_properties_of_circuits()
        cost_now = cost_function()
        if cost_now < global_cost:
            best_family = copy.deepcopy(self._circuits_list)
            global_cost = cost_now
            anf.cool_print('Best family length:', len(best_family), 'red')

    self.reset_everything()
    self._circuits_list = best_family
    self.update_dictionary_subsets_symbols_counting()
    self.calculate_properties_of_circuits()
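# Example (illustrative sketch) of a combined cost function, as described in the
# docstring above; `collection` stands for an instance of the owning class, and
# the kwargs are routed through compute_perfect_collection:
#
#     collection.compute_perfect_collection(
#         method_name='bruteforce_randomized',
#         method_kwargs={'number_of_iterations': 100,
#                        'circuits_in_batch': 1,
#                        'print_updates': True,
#                        'optimized_quantity': {'minimal_amount': 1.0,
#                                               'spread': 0.5},
#                        'additional_circuits': 50})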
        estimated_neighbors_list.append(estimated_neighbors['q%s' % qi])
    else:
        estimated_neighbors_list.append(None)

noise_matrices_dictionary = noise_model_analyzer.noise_matrices_dictionary

import colorama

# Compare the estimated single-qubit noise matrices with the true ones.
for qi in range(number_of_qubits):
    cluster_string_now = 'q%s' % qi
    estimated_noise_matrices = noise_matrices_dictionary[cluster_string_now]
    true_matrices = true_noise_matrices[cluster_string_now]

    print()
    anf.cool_print('Estimated matrices for cluster: ', [qi])
    print(estimated_noise_matrices)
    anf.cool_print('True matrices for cluster: ', [qi], colorama.Fore.BLUE)
    print(true_matrices)

noise_model_analyzer.clusters_list = estimated_clusters
noise_model_analyzer.neighborhoods = estimated_neighbors

global_noise_creator_estimate = GlobalNoiseMatrixCreator(
    noise_model_analyzer.noise_matrices_dictionary)

global_noise_matrix_estimated = global_noise_creator_estimate.compute_global_noise_matrix(
    estimated_clusters, estimated_neighbors_list)
def run_batches(batches,
                backend_name,
                shots=8192,
                saving_IDs_dictionary=None):
    """Send batches of circuits for execution, optionally saving job IDs."""
    # Use None as the default to avoid a shared mutable default argument.
    if saving_IDs_dictionary is None:
        saving_IDs_dictionary = {'saving': False,
                                 'directory': None,
                                 'file_name': None,
                                 'dictionary_to_save': {}}
    saving = saving_IDs_dictionary['saving']

    anf.cool_print('\nSending jobs to execution on: ', backend_name + '.')
    anf.cool_print('Number of shots: ', str(shots) + '.')
    anf.cool_print('Target number of jobs: ', str(len(batches)) + '.')

    iterations_done = 0
    wait_time_in_minutes = 10
    print()

    jobs = []
    while iterations_done < len(batches):
        anf.cool_print('Job number:', str(iterations_done))
        circuits = batches[iterations_done]

        if backend_name in ['qasm_simulator', 'statevector_simulator',
                            'unitary_simulator']:
            backend = Aer.get_backend(backend_name)
        else:
            IBMQ.load_account()
            provider = IBMQ.get_provider(group='open')
            backend = provider.get_backend(backend_name)

        try:
            time.sleep(2)
            qobj_id = ('first_circuit-' + circuits[0].name
                       + '-last_circuit-' + circuits[-1].name
                       + '-date-' + anf.get_date_string('-'))
            anf.cool_print("Sending quantum program to: ", backend_name + '.')
            job = qiskit.execute(circuits,
                                 backend,
                                 shots=shots,
                                 max_credits=200,
                                 qobj_id=qobj_id)

            if saving and backend_name not in ['qasm_simulator',
                                               'statevector_simulator',
                                               'unitary_simulator']:
                job_ID = job.job_id()
                dict_to_save = saving_IDs_dictionary['dictionary_to_save']
                dict_to_save['job_ID'] = job_ID
                anf.save_results_pickle(
                    dictionary_to_save=dict_to_save,
                    directory=saving_IDs_dictionary['directory'],
                    custom_name=saving_IDs_dictionary['file_name']
                                + '_job%s' % iterations_done)

            jobs.append(job)
            while job.status() == JobStatus.INITIALIZING:
                print(job.status())
                time.sleep(2)
            anf.cool_print("Program sent for execution to: ", backend_name + '.')
        except BaseException as ex:
            print('There was an error with the circuits! Error = {}'.format(ex))
            print(f'Waiting {wait_time_in_minutes} minute(s) before the next try.')
            time.sleep(wait_time_in_minutes * 60)
            continue

        print()
        iterations_done += 1

    return jobs
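# Example (illustrative sketch; the backend name is a placeholder and `batches`
# is assumed to be a list of lists of qiskit circuits, e.g. built from the
# CircuitsCreatorDDOT output above):
#
#     jobs = run_batches(batches, backend_name='qasm_simulator', shots=8192)
#     counts = get_counts_from_jobs(jobs)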