Example no. 1
def plot_6():
    size = 6
    critical_detuning = -9.604213726908476  #-6.019449429835163
    critical_detunings = np.concatenate(
        [-np.linspace(2, 10, 10), [critical_detuning]])
    graph_index = 807
    graph_data = loadfile(graph_index, size)
    grid = graph_data['graph_mask']
    print('Initializing graph')
    graph = unit_disk_grid_graph(grid, periodic=False, radius=1.6)
    graph_finite = unit_disk_grid_graph(grid, periodic=False, radius=1.1)

    graph_finite.generate_independent_sets()

    n_points = 7
    times = 2**np.linspace(-2.5, 4.5 / 6 * (n_points - 1) - 2.5, n_points)
    times = np.concatenate([times, [2.5]])  # np.array([2.5])#
    times = times + .312 * 2
    cost = hamiltonian.HamiltonianMIS(graph, IS_subspace=True)

    performances = np.full((len(critical_detunings), len(times)), np.nan)
    for (d, detuning) in enumerate(critical_detunings):
        for (t, tf) in enumerate(times):
            try:
                cost.energies = (1, )
                state = State(np.load('{}x{}_{}_{}_{}_trotterize.npz'.format(
                    size, size, graph_index, np.round(np.abs(detuning), 2),
                    np.round(np.abs(tf), 2)))['state'],
                              is_ket=True,
                              IS_subspace=True,
                              graph=graph)
                performances[d, t] = MIS_probability_finite(
                    state, graph, graph_finite)
                print(tf, detuning, performances[d, t])
            except Exception:
                # Skip detuning/time points whose precomputed state file is missing or unreadable
                pass

    colors = [
        'blue', 'green', 'navy', 'orange', 'firebrick', 'purple', 'magenta',
        'cornflowerblue', 'teal', 'grey', 'cyan', 'limegreen', 'red', 'yellow',
        'pink', 'orangered', 'salmon', 'violet'
    ]

    for (d, detuning) in enumerate(critical_detunings):
        plt.scatter(times - 2 * .312,
                    performances[d],
                    color=colors[d],
                    label='Detuning $={}$'.format(np.round(detuning, 2)))
        plt.plot(times - 2 * .312, performances[d], color=colors[d])
    print(repr(performances))
    plt.xlabel(r'Total time ($\mu s$)')
    plt.ylabel('MIS probability')
    plt.semilogx()
    plt.legend()
    plt.show()
def collect_gap_statistics(size):
    size_indices = np.array([5, 6, 7, 8, 9, 10])
    size_index = np.argwhere(size == size_indices)[0, 0]
    gaps = []
    ratios = []
    degeneracies = []
    locs = []
    graph_indices = []
    for index in range(38):
        xls = pd.ExcelFile('MIS_degeneracy_ratio.xlsx')
        # graph_index = graphs[index]
        graph_index = int(
            pd.read_excel(xls, 'Sheet1').to_numpy()[index, size_index])
        graph_indices.append(graph_index)
        print(graph_index)
        graph_data = loadfile(graph_index, size)
        grid = graph_data['graph_mask']
        # We want to have the file of hardest graphs, then import them
        graph = unit_disk_grid_graph(grid, periodic=False, generate_mixer=True)
        # tails_graph = rydberg_graph(grid, visualize=False)
        # print(np.arange(graph.num_independent_sets, 0, -1))
        # print(np.sum(2 ** np.arange(graph.n, 0, -1) * (1 - graph.independent_sets), axis=1))
        # print(1-graph.independent_sets)
        print('Degeneracy', graph.degeneracy)
        print('MIS size', graph.mis_size)
        print('Hilbert space size', graph.num_independent_sets)
        # tails_graph = rydberg_graph(grid, visualize=False)
        """gap_linear, loc_linear = find_gap(graph, tails_graph, k=2)
Example no. 3
def count_maximal(graph_mask):
    graph = unit_disk_grid_graph(graph_mask)
    graph.generate_independent_sets()
    # Count all independent sets exactly one vertex smaller than the MIS
    num_misminusone = len(
        np.argwhere(np.sum(1 - graph.independent_sets, axis=1) == graph.mis_size - 1))
    # Maximal cliques of the complement graph are maximal independent sets of the graph
    cliques = nx.algorithms.clique.find_cliques(nx.complement(graph.graph))
    num_maximal = 0
    for clique in cliques:
        # Keep only the maximal independent sets of size |MIS| - 1
        if len(clique) == graph.mis_size - 1:
            num_maximal += 1
    return num_misminusone, num_maximal, graph.degeneracy
def generate_hamiltonians(n, index):
    # Load the stored n x n site mask for this graph index (flattened, Fortran-ordered row of the .dat file)
    graph_mask = np.reshape(np.loadtxt(
        'configurations/mis_degeneracy_L%d.dat' % n)[index, 3:], (n, n),
                            order='F')[::-1, ::-1].T.astype(bool)
    graph = unit_disk_grid_graph(graph_mask, visualize=False)
    spin_flip = HamiltonianDriver(IS_subspace=True, graph=graph)
    detuning = HamiltonianMIS(IS_subspace=True, graph=graph)
    spin_exchange = HamiltonianSpinExchange(graph)
    onsite_term = HamiltonianOnsiteTerm(spin_exchange)

    return detuning._hamiltonian, spin_flip._hamiltonian, spin_exchange._hamiltonian, onsite_term._hamiltonian
Example no. 5
def collect_gap_statistics(size):
    size_indices = np.array([5, 6, 7, 8, 9, 10])
    size_index = np.argwhere(size == size_indices)[0, 0]
    gaps = []
    ratios = []
    degeneracies = []
    for index in range(100):
        xls = pd.ExcelFile('MIS_degeneracy_ratio.xlsx')
        """graphs = np.array([667, 557,  78, 312, 807, 776, 485, 980,  71,  50, 521, 773, 549,
           523, 374, 515, 669, 344,  21, 107, 201, 851, 736, 508, 286, 526,
           385, 116,  20, 999, 357, 149, 872, 233, 528, 603, 912, 820, 155,
           744, 438, 931,  68, 610, 209, 876, 558, 809, 702, 194, 828, 437,
           470, 958, 359, 677, 185, 813, 715, 420, 153, 573, 394, 542, 688,
           863, 771, 325, 502, 795, 617, 722, 793, 182, 363, 984, 447, 506,
           673, 950, 329, 127, 492, 428, 343, 391, 812, 949,  69, 265, 276,
           564, 336, 966, 963, 219, 321, 144, 435, 696])"""

        # graph_index = graphs[index]
        graph_index = int(
            pd.read_excel(xls, 'Sheet1').to_numpy()[index, size_index])

        graph_data = loadfile(graph_index, size)
        grid = graph_data['graph_mask']
        # We want to have the file of hardest graphs, then import them
        graph = unit_disk_grid_graph(grid, periodic=False)
        raise Exception  # debug stop: halts the loop after constructing the first graph
        # print(np.arange(graph.num_independent_sets, 0, -1))
        # print(np.sum(2 ** np.arange(graph.n, 0, -1) * (1 - graph.independent_sets), axis=1))
        # print(1-graph.independent_sets)
        print('Degeneracy', graph.degeneracy)
        print('MIS size', graph.mis_size)
        print('Hilbert space size', graph.num_independent_sets)
        # tails_graph = rydberg_graph(grid, visualize=False)
        gap, loc = find_gap(graph, k=2)
        gaps.append(gap)
        degeneracies.append(graph.degeneracy)
        is_sizes = np.sum(1 - graph.independent_sets, axis=1)
        ratio = np.sum(is_sizes == graph.mis_size -
                       1) / np.sum(is_sizes == graph.mis_size)
        ratios.append(ratio)
        print(gaps, degeneracies)
    # (preceding array truncated) ... 999, 357, 149, 872, 233, 528, 603, 912, 820])

    graphs = np.array([
        189, 623, 354, 40, 323, 173, 661, 345, 813, 35, 162, 965, 336, 667,
        870, 1, 156, 901, 576, 346
    ])

    graph_index = graphs[index]
    t = locs_7[index]
    # graph_index = 661
    # graph_index = 189
    graph_data = loadfile(graph_index, size)
    grid = graph_data['graph_mask']
    print('Initializing graph')
    graph = unit_disk_grid_graph(grid, periodic=False, radius=1.51)
    tails_graph = rydberg_graph(grid, visualize=False)
    # graph.generate_independent_sets()
    if not np.isinf(t):
        eigval, eigvec = find_ground_first_excited(graph, tails_graph, t, k=3)
        np.save('{}x{}_eigvec_{}.npy'.format(size, size, graph_index), eigvec)
"""for index in range(20):
    import sys
    #index = int(sys.argv[1])
    size = 6
    #size_indices = np.array([5, 6, 7, 8, 9, 10])
    #size_index = np.argwhere(size == size_indices)[0, 0]
    gaps_8 = np.array([0.018555264577685193, 0.3494778895114905, 1.7053519896727494, 1.7973674033959242,
                       1.3194361160893777, 0.3789663220178454, 2.155533273799165, 4.454764452556901,
                       0.677728932914647, 1.108900340765672, 1.674387563824439, np.inf,
                       np.inf, np.inf, np.inf, np.inf,
Example no. 7
           label=r'slope$=$' + str(np.round(res_linear[0], 3)),
           color='k',
           linestyle=':')
# plt.plot(times_exp, res_linear_smooth[1] * times_exp ** res_linear_smooth[0],
#         label=r'slope$=$'+str(np.round(res_linear_smooth[0], 3)), color='k', linestyle='solid')
ax[0].set_xlabel('Total depth')
ax[1].set_xlabel('Total depth')

ax[0].set_ylabel(r'$\varepsilon$')
plt.loglog()
ax[0].legend(fontsize='small', frameon=False)
ax[1].legend(fontsize='small', frameon=False)

plt.show()
# Evolve
# import sys
# index = int(sys.argv[1])
index = 0
size = 6
size_indices = np.array([5, 6, 7, 8, 9, 10])
size_index = np.argwhere(size == size_indices)[0, 0]
xls = pd.ExcelFile('MIS_degeneracy_ratio.xlsx')
graph_index = int(pd.read_excel(xls, 'Sheet1').to_numpy()[index, size_index])

graph_data = loadfile(graph_index, size)
grid = graph_data['graph_mask']
times = 2 * np.pi * 2**np.linspace(-2.5, 2, 7)

graph = unit_disk_grid_graph(grid, periodic=False, visualize=False)
find_ratio(None, graph, 60)
Example no. 8
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from qsim.graph_algorithms.graph import unit_disk_grid_graph
"""
For various lattices, compute the defect density as a function of ramp time. 
"""


def two_point_correlator(graph, state):
    """Compute the two point correlator for every pair of nodes in the graph."""
    for u in graph.nodes:
        for v in graph.nodes:
            if u != v:
                # Compute two point correlator
                for i in range(state.shape[0]):
                    pass


graph = unit_disk_grid_graph(np.ones((3, 4)), IS=True)
# Note: two_point_correlator also needs a state over the IS subspace as its second
# argument; the call is commented out here because no state is constructed in this snippet.
# two_point_correlator(graph, state)
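# A minimal sketch of what the correlator body might compute (an assumption, not
# the original implementation): treat `state` as a normalized ket over the IS
# subspace and use `graph.independent_sets` (0 = occupied vertex, as in the other
# examples) to get occupation numbers. Returns the connected correlator
# C[u, v] = <n_u n_v> - <n_u><n_v> for every pair of vertices.
def two_point_correlator_sketch(graph, state):
    probs = np.abs(np.asarray(state).flatten()) ** 2            # measurement probabilities
    occupations = 1 - graph.independent_sets                    # (num_IS, n) occupation numbers
    mean_n = probs @ occupations                                 # <n_u>
    pair_corr = occupations.T @ (probs[:, None] * occupations)  # <n_u n_v>
    return pair_corr - np.outer(mean_n, mean_n)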
Example no. 9
    # Construct an operator that is zero everywhere except at the optimum
    optimum = np.zeros(cost._diagonal_hamiltonian.shape)
    optimum[optimum_indices] = 1
    optimum_overlap = cost.optimum_overlap(final_state)
    return final_state, optimum, optimum_overlap


"""
- Final state MIS distribution vs runtime
- Figure out what the Porter-Thomas distribution is
"""
for i in range(50):
    arr = np.concatenate([np.ones(24), np.zeros(6)])
    np.random.shuffle(arr)
    graph = unit_disk_grid_graph(np.reshape(arr, (5, 6)))
    #print('degeneracy', graph.degeneracy, 'hilbert space size', graph.num_independent_sets)
    final_state, optimum, optimum_overlap = find_fidelity(graph, 35)
    plt.scatter(graph.degeneracy, 1 - optimum_overlap, color='navy')
    #final_state = final_state*optimum
    #final_state = final_state/np.linalg.norm(final_state)
    #final_state = (np.abs(final_state)**2).flatten()
    #final_state = np.flip(np.sort(final_state))
    #print(final_state[:100])
    #where_nonzero = np.max(np.argwhere(final_state != 0))
    #final_state = final_state[:where_nonzero+1]
    #plt.bar(np.arange(len(final_state)), final_state)
    #plt.xlabel('MIS index')
    #plt.ylabel('Probability')
    #plt.show()
plt.loglog()
Example no. 10
# What is the energy spacing
# print(res[0]*np.mean(np.diff(eigvals)[0:6]))
#plt.plot(eigvals, np.e ** (res[0] * eigvals + res[1]))
#plt.semilogy()
plt.ylabel(r'$\sum_i \int_0^1 ds |\langle j |c_i| 0 \rangle|^2$')
plt.xlabel('final independent set size')
# plt.ylabel(r'$\sum_i|\langle j |c_i| 0 \rangle|^2$ at $\frac{t}{T}=.5$')
# plt.xlabel(r'$|E_j-E_0|$')
plt.show()"""
num = 50
n = 9
times = np.linspace(0.01, .99, num)
delta_rs = np.linspace(-1, 5, 5 * num)
from qsim.graph_algorithms.graph import unit_disk_grid_graph
arr = np.array([[1, 1, 1, 1], [1, 1, 1, 0], [1, 1, 1, 1], [0, 1, 1, 0]])
graph = unit_disk_grid_graph(arr)
rates = dissipation_over_time(times,
                              delta_rs,
                              graph=graph,
                              mode='hybrid',
                              which='S')
plt.imshow(np.log10(rates),
           interpolation='none',
           extent=[0, 1, -1, 5],
           origin='lower',
           aspect='auto',
           vmin=-9,
           vmax=0)
plt.colorbar()
plt.xlabel('time')
plt.ylabel(r'$\delta_r$')
Example no. 11
from qsim.schrodinger_equation import SchrodingerEquation
from qsim.graph_algorithms.adiabatic import SimulateAdiabatic
from qsim.tools import tools
from qsim.evolution.lindblad_operators import SpontaneousEmission
from matplotlib import rc
from scipy.optimize import minimize, minimize_scalar, basinhopping, brentq

grid3 = np.array([[False, False, True, True, True, True],
                  [False, True, False, True, True, True],
                  [True, True, True, True, True, False],
                  [False, True, True, True, True, True],
                  [True, True, True, True, True, True],
                  [True, True, True, True, True, False]])

graph = unit_disk_grid_graph(grid3, periodic=False, visualize=False)
tails_graph = rydberg_graph(grid3, visualize=False)
"""
Plan:
Toss 5x6 unit disk graphs with 24 nodes. Time evolve with different sweep times until you find the critical time where 
the MIS probability is 0.95. At this time, look at the final MIS probabilities.
"""


def find_critical_time(graph, critical_optimum_overlap):
    cost = hamiltonian.HamiltonianMIS(graph, IS_subspace=True)
    driver = hamiltonian.HamiltonianDriver(IS_subspace=True, graph=graph)

    def schedule(t, T):
        # Linear ramp on the detuning, experiment-like ramp on the driver
        k = 50
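# The snippet above cuts off inside `schedule`. A minimal, self-contained sketch of
# the critical-time search described in the plan (an assumption, not the original
# code): treat the final MIS probability as a function of the total sweep time T and
# locate where it crosses the target overlap with brentq (imported above).
# `mis_probability_after_sweep(graph, T)` is a hypothetical helper that runs the
# sweep for total time T and returns the final MIS probability; T_min and T_max are
# assumed to bracket the crossing.
def find_critical_time_sketch(graph, critical_optimum_overlap=0.95,
                              T_min=0.1, T_max=50.0):
    def overlap_gap(T):
        return mis_probability_after_sweep(graph, T) - critical_optimum_overlap
    return brentq(overlap_gap, T_min, T_max)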
Example no. 12
ns = np.array(ns)
#degeneracies = np.array(degeneracies)/ns
plt.scatter(degeneracies,
            np.array(probabilities),
            color='midnightblue',
            alpha=.5)
plt.ylabel('Dimensionless leakage rate per atom')
plt.xlabel('MIS degeneracy')
#ax.yaxis.set_major_formatter(mtick.PercentFormatter())
#plt.semilogy()
#plt.show()

arr = np.array([[0, 1, 0, 0, 1], [1, 1, 1, 1, 0], [0, 1, 1, 0, 1],
                [1, 1, 1, 1, 1], [1, 1, 0, 1, 1], [1, 0, 1, 1, 0]])
print(np.sum(arr))
graph = unit_disk_grid_graph(arr, visualize=False)

stirap_energies = np.array([
    2.04678208e-10, 3.16203131e-08, 3.66905032e-07, 2.04806481e-06,
    8.02245036e-06, 2.54392431e-05, 6.99820512e-05, 1.73549410e-04,
    3.97064105e-04, 8.50513432e-04, 1.72217314e-03, 3.31793205e-03,
    6.10939598e-03, 1.07861183e-02, 1.83036771e-02, 2.99167762e-02,
    4.71864288e-02, 7.19531035e-02, 1.06272828e-01, 1.52319209e-01,
    2.12259679e-01, 2.88117986e-01, 3.81636679e-01, 4.94152967e-01,
    6.26499373e-01, 7.78937203e-01, 9.51126868e-01, 1.14213495e+00,
    1.35047432e+00, 1.57417077e+00, 1.81084845e+00, 2.05782545e+00,
    2.31221210e+00, 2.57100536e+00, 2.83117444e+00, 3.08973467e+00,
    3.34380792e+00, 3.59066933e+00, 3.82778112e+00, 4.05281486e+00,
    4.26366381e+00, 4.45844746e+00, 4.63550998e+00, 4.79341434e+00,
    4.93093385e+00, 5.04704212e+00, 5.14090289e+00, 5.21186030e+00,
    5.25943042e+00, 5.28329457e+00, 5.28674228e+00, 5.29042936e+00,