Example #1
# Compute a perfect collection of DDOT (Diagonal Detector Overlapping Tomography)
# circuits, i.e., a collection in which every computational basis state appears
# on each k-qubit subset at least once.
base_OT.compute_perfect_collection(method_name=method_name,
                                   method_kwargs=method_kwargs)

# Get the list of generated DDOT circuits
DDOT_circuits = base_OT.circuits_list

# Calculate various properties of the circuit collection (and print them)
base_OT.calculate_properties_of_circuits()

circuits_properties = base_OT.circuits_properties_dictionary
anf.cool_print("Properties of the family:\n", circuits_properties)


if saving and anf.query_yes_no('Do you still wish to save?'):
    dictionary_to_save = {'circuits_list': DDOT_circuits,
                          'circuits_properties': circuits_properties}

    directory = anf.get_module_directory() + '/saved_data/data_circuits_collections/DDOT/' + \
                'locality_%s' % subsets_locality + '/number_of_qubits_%s' % number_of_qubits + '/'

    file_name = 'circuits_amount%s' % len(DDOT_circuits) + '_' + anf.gate_proper_date_string()

    directory_to_save = anf.save_results_pickle(dictionary_to_save=dictionary_to_save,
                                                directory=directory,
                                                custom_name=file_name
                                                )
Example #2
clusters_list, neighborhoods = dictionary_data[
    'clusters_labels_list'], dictionary_data['neighborhoods']

# Get an instance of the correction data generator; it will generate correction
# matrices for the marginals based on the provided noise model.
correction_data_generator = CorrectionDataGenerator(
    results_dictionary_ddt=dictionary_results,
    bitstrings_right_to_left=bitstrings_right_to_left,
    number_of_qubits=number_of_qubits,
    marginals_dictionary=marginal_dictionaries_initial,
    clusters_list=clusters_list,
    neighborhoods=neighborhoods,
    noise_matrices_dictionary=noise_matrices_dictionary_initial)

all_pairs = [[qi, qj] for qi in list_of_qubits for qj in list_of_qubits
             if qj > qi]
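# Note: for a sorted qubit list this is equivalent to
# itertools.combinations(list_of_qubits, 2); the qj > qi filter just keeps
# each unordered pair exactly once.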

# Get the data needed to correct two-qubit marginals (for 2-local Hamiltonian problems)
correction_data = correction_data_generator.get_pairs_correction_data(
    all_pairs, show_progress_bar=True)

if saving:
    # Update dictionary to be saved
    dictionary_data['correction_data'] = correction_data

    # Save results
    directory = tests_directory + 'mitigation_on_marginals/' + backend_name \
                + '/number_of_qubits_%s' % number_of_qubits + '/' + date + '/DDOT/'

    anf.save_results_pickle(dictionary_data, directory,
                            '04_test_results_correction_data')
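
# For intuition, a hedged sketch (not the QREM call itself) of how such pair
# correction data is typically applied: a noisy two-qubit marginal distribution
# is multiplied by the correction matrix, i.e., the inverse of the noise matrix.
import numpy as np

noise_matrix = np.array([[0.95, 0.02, 0.03, 0.00],   # columns: input basis states
                         [0.03, 0.94, 0.00, 0.03],   # rows: measured outcomes
                         [0.02, 0.00, 0.95, 0.03],   # (columns sum to 1)
                         [0.00, 0.04, 0.02, 0.94]])
correction_matrix = np.linalg.inv(noise_matrix)

noisy_marginal = np.array([0.46, 0.06, 0.05, 0.43])  # illustrative p(00), ..., p(11)
mitigated_marginal = correction_matrix @ noisy_marginal
print(mitigated_marginal, mitigated_marginal.sum())  # sums to ~1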
Example #3

if saving:
    # Build the directory for saving the raw (unprocessed) counts
    save_directory_raw_counts = anf.get_module_directory() + "/saved_data/tomography_results/DDOT/" \
                                + backend_name + "/number_of_qubits_%s" % number_of_qubits + "/counts_raw/" \
                                + date_string + "/"

    file_name = 'circuits_amount%s' % circuits_amount + '_' + date_string

    dictionary_to_save = {
        'true_qubits': dictionary_data['true_qubits'],
        'experiment_name': "DDOT",
        'circuits_labels': dictionary_data['circuits_labels'],
        'SDK_name': backend_name,
        'date': date_string,
        'unprocessed_counts': unprocessed_results
    }

    anf.save_results_pickle(dictionary_to_save=dictionary_to_save,
                            directory=save_directory_raw_counts,
                            custom_name=file_name)

    processed_results = fdt.convert_counts_overlapping_tomography(
        counts_dictionary=unprocessed_results, experiment_name="DDOT")

    save_directory_counts_processed = anf.get_module_directory() + "/saved_data/tomography_results/DDOT/" \
                                      + backend_name + "/number_of_qubits_%s" % number_of_qubits \
                                      + "/counts_processed/" + date_string + "/"

    dictionary_to_save['results_dictionary_preprocessed'] = processed_results

    anf.save_results_pickle(dictionary_to_save=dictionary_to_save,
                            directory=save_directory_counts_processed,
                            custom_name=file_name)
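
# What convert_counts_overlapping_tomography does internally is not shown here,
# but a typical ingredient of such processing is endianness handling: qiskit
# count keys list qubit 0 rightmost, so bitstrings are often reversed so that
# position i corresponds to qubit i. A hedged sketch (an assumption, not the
# actual fdt implementation):
def reverse_count_keys(counts_dictionary):
    """Reverse the bit order of every key in a counts dictionary."""
    return {key[::-1]: value for key, value in counts_dictionary.items()}


print(reverse_count_keys({'011': 500, '100': 524}))  # {'110': 500, '001': 524}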
Example #4
import time

import qiskit
from qiskit import Aer, IBMQ
from qiskit.providers import JobStatus

# Note: "anf" is the package's ancillary-functions helper module, imported elsewhere.


def run_batches(
    batches,
    backend_name,
    shots=8192,
    saving_IDs_dictionary=None):
    # Use None as the default to avoid the mutable-default-argument pitfall.
    if saving_IDs_dictionary is None:
        saving_IDs_dictionary = {'saving': False,
                                 'directory': None,
                                 'file_name': None,
                                 'dictionary_to_save': {}}

    saving = saving_IDs_dictionary['saving']

    anf.cool_print('\nSending jobs to execution on: ', backend_name + '.')
    anf.cool_print('Number of shots: ', str(shots) + '.')
    anf.cool_print('Target number of jobs: ', str(len(batches)) + '.')

    iterations_done = 0
    wait_time_in_minutes = 10

    print()
    jobs = []
    while iterations_done < len(batches):
        anf.cool_print('job number:', str(iterations_done))
        circuits = batches[iterations_done]

        if backend_name in [
                'qasm_simulator', 'statevector_simulator', 'unitary_simulator'
        ]:
            backend = Aer.get_backend(backend_name)
        else:
            IBMQ.load_account()
            provider = IBMQ.get_provider(group='open')
            backend = provider.get_backend(backend_name)

        try:
            time.sleep(2)
            qobj_id = 'first_circuit-' + circuits[
                0].name + '-last_circuit-' + circuits[
                    -1].name + '-date-' + anf.get_date_string('-')
            anf.cool_print("Sending quantum program to: ", backend_name + '.')
            job = qiskit.execute(circuits,
                                 backend,
                                 shots=shots,
                                 max_credits=200,
                                 qobj_id=qobj_id)

            if saving and backend_name not in [
                    'qasm_simulator', 'statevector_simulator',
                    'unitary_simulator'
            ]:
                job_ID = job.job_id()
                dict_to_save = saving_IDs_dictionary['dictionary_to_save']
                dict_to_save['job_ID'] = job_ID
                anf.save_results_pickle(
                    dictionary_to_save=dict_to_save,
                    directory=saving_IDs_dictionary['directory'],
                    custom_name=saving_IDs_dictionary['file_name'] +
                    '_job%s' % iterations_done)

            jobs.append(job)
            while job.status() == JobStatus.INITIALIZING:
                print(job.status())
                time.sleep(2)

            anf.cool_print("Program sent for execution to: ",
                           backend_name + '.')

        except Exception as ex:
            # Catch Exception (not BaseException) so e.g. Ctrl+C still aborts the loop.
            print('There was an error with the job! Error = {}'.format(ex))
            print(f'Waiting {wait_time_in_minutes} minute(s) before the next try.')
            time.sleep(wait_time_in_minutes * 60)
            continue

        print()
        iterations_done += 1

    return jobs
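
# Minimal usage sketch for run_batches (assumes the legacy qiskit-terra stack
# with the Aer/IBMQ providers used above): submit two one-circuit batches to
# the local simulator and collect the counts.
bell = qiskit.QuantumCircuit(2, 2, name='bell')
bell.h(0)
bell.cx(0, 1)
bell.measure([0, 1], [0, 1])

jobs = run_batches(batches=[[bell], [bell]],
                   backend_name='qasm_simulator',
                   shots=1024)
print([job.result().get_counts() for job in jobs])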