Example #1
def test_vmc_construction_vstate():
    ha, sx, ma, sa, driver = _setup_vmc()

    op = nk.optimizer.Sgd(learning_rate=0.05)

    driver = nk.Vmc(ha, op, sa, nk.models.RBM(), n_samples=1000, seed=SEED)

    driver.run(1)

    assert driver.step_count == 1

    with raises(TypeError):
        ha2 = nk.operator.LocalOperator(ha.hilbert * ha.hilbert)
        driver = nk.Vmc(ha2, op, variational_state=driver.state)
Example #2
def _setup_vmc(n_samples=200,
               diag_shift=0,
               use_iterative=False,
               lsq_solver=None,
               **kwargs):
    L = 8
    nk.random.seed(SEED)
    hi = nk.hilbert.Spin(s=0.5)**L

    ma = nk.machine.RbmSpin(hilbert=hi, alpha=1)
    ma.init_random_parameters(sigma=0.01, seed=SEED)

    ha = nk.operator.Ising(hi, nk.graph.Hypercube(length=L, n_dim=1), h=1.0)
    sa = nk.sampler.MetropolisLocal(machine=ma)

    op = nk.optimizer.Sgd(ma, learning_rate=0.1)
    sr = nk.optimizer.SR(ma,
                         use_iterative=use_iterative,
                         diag_shift=diag_shift,
                         lsq_solver=lsq_solver)

    vmc = nk.Vmc(ha, sa, op, n_samples=n_samples, sr=sr, **kwargs)

    # Add custom observable: sigma_x on every site
    X = [[0, 1], [1, 0]]
    sx = nk.operator.LocalOperator(hi, [X] * L, [[i] for i in range(L)])

    return ma, vmc, sx
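A minimal usage sketch for this helper (the argument values are placeholders; the obs-dict pattern mirrors the run() call in Example #22):

ma, vmc, sx = _setup_vmc(n_samples=500, diag_shift=0.01)
# Run 20 iterations, logging the energy and the SigmaX observable
vmc.run(n_iter=20, out="vmc_test", obs={"SigmaX": sx})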
Example #3
def _vmc(n_iter=20):
    hi = nk.hilbert.Spin(s=0.5) ** L

    ma = nk.models.RBM(alpha=1)

    ha = nk.operator.Ising(hi, nk.graph.Hypercube(length=L, n_dim=1), h=1.0)
    sa = nk.sampler.MetropolisLocal(hi)
    vs = nk.vqs.MCState(sa, ma, n_samples=500, seed=SEED)

    op = nk.optimizer.Sgd(learning_rate=0.1)

    return nk.Vmc(hamiltonian=ha, variational_state=vs, optimizer=op)
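A short usage sketch, assuming SEED and L are module-level constants (the values here are placeholders):

SEED = 3141592  # placeholder
L = 8           # placeholder

driver = _vmc()
driver.run(n_iter=20)
assert driver.step_count == 20  # step_count advances once per iteration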
Example #4
def test_mps(diag):
    L = 6
    g = nk.graph.Hypercube(length=L, n_dim=1)
    hi = nk.hilbert.Spin(s=0.5, N=g.n_nodes)

    ma = nk.models.MPSPeriodic(hilbert=hi, graph=g, bond_dim=2, diag=diag)
    sa = nk.sampler.MetropolisLocal(hilbert=hi, n_chains=16)

    vs = nk.vqs.MCState(sa, ma, n_samples=1000)

    ha = nk.operator.Ising(hi, graph=g, h=1.0)
    op = nk.optimizer.Sgd(learning_rate=0.05)

    driver = nk.Vmc(ha, op, variational_state=vs)

    driver.run(3)
Example #5
def run(h, j, l, n_iterations, a, n_samp):
    # 1D Lattice
    g = nk.graph.Hypercube(length=l, n_dim=1, pbc=True)

    # Hilbert space of spins on the graph
    hi = nk.hilbert.Spin(s=1 / 2, N=g.n_nodes)

    # Ising spin hamiltonian
    ha = nk.operator.Ising(h=h, hilbert=hi, graph=g, J=j)

    # RBM Spin Machine
    ma = nk.machine.RbmSpin(alpha=a,
                            hilbert=hi,
                            use_visible_bias=False,
                            use_hidden_bias=False)

    ma.init_random_parameters(seed=1234, sigma=0.01)

    # Metropolis Local Sampling
    sa = nk.sampler.MetropolisLocal(ma, n_chains=32)

    # Optimizer
    op = nk.optimizer.Sgd(ma, learning_rate=0.1)

    # Stochastic Reconfiguration
    sr = nk.optimizer.SR(ma, diag_shift=0.05)

    # Create the optimization driver with stochastic reconfiguration
    gs = nk.Vmc(hamiltonian=ha,
                sampler=sa,
                optimizer=op,
                n_samples=n_samp,
                sr=sr,
                n_discard=20)

    # Run the optimization for n_iterations steps
    gs.run(n_iter=n_iterations, out="DataMATRIX5_J1_1D_H_{:.1f}".format(h))
    s = ma.to_array()
    normalised = s.real / np.sum(s.real)
    #normalised = np.sqrt(s.real)

    #normalised = 1
    #plt.plot(normalised)
    params = ma.parameters.imag
    #weights = params[:-24]
    print(np.shape(params))
    return normalised, params
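A hypothetical invocation of run() above (all argument values are placeholders):

probs, weights = run(h=1.0, j=1.0, l=8, n_iterations=300, a=1, n_samp=1000)
print(probs.sum())  # ~1.0 by construction of the normalisation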
Example #6
    def reset(self):
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        self._set_graph()
        if rank == 0:
            sys.stdout.write("Graph one the {:d} vertices.\n".format(
                self.graph.n_sites))
            sys.stdout.flush()
        self.n_spins = self.graph.n_sites
        self.hilbert = nk.hilbert.Spin(graph=self.graph, s=0.5)
        self.machine = nk.machine.RbmSpin(self.hilbert, alpha=4)
        self.machine.init_random_parameters(seed=42, sigma=1.0e-2)
        self.sampler = nk.sampler.MetropolisLocal(self.machine)
        self._set_operator()
        self.optimizer = nk.optimizer.RmsProp()

        use_cholesky = self.machine.n_par < 10000

        self.vmc = nk.Vmc(
            hamiltonian=self.hamiltonian,
            sampler=self.sampler,
            optimizer=self.optimizer,
            n_samples=max([2000, self.n_spins * 50]),
            sr=nk.optimizer.SR(
                lsq_solver="LLT",
                diag_shift=1.0e-2,
                use_iterative=not use_cholesky,
                is_holomorphic=self.sampler.machine.is_holomorphic,
            ),
        )

        if rank == 0:
            sys.stdout.write("RBM with {:d} params.\n".format(
                self.machine.n_par))
            sys.stdout.write(self.vmc.info())
            sys.stdout.write("\n")
            sys.stdout.flush()

        self.corr_operators = {}

        for i in range(self.n_spins):
            for j in range(self.n_spins):
                self.corr_operators["{:d}-{:d}".format(
                    i, j)] = sigmaz(self.hilbert, i) * sigmaz(self.hilbert, j)
        self.correlations = []
Example #7
def _vmc(n_iter=20, callbacks=()):
    nk.random.seed(SEED)
    hi = nk.hilbert.Spin(s=0.5)**L

    ma = nk.machine.RbmSpin(hilbert=hi, alpha=1)
    ma.init_random_parameters(sigma=0.01, seed=SEED)

    ha = nk.operator.Ising(hi, nk.graph.Hypercube(length=L, n_dim=1), h=1.0)
    sa = nk.sampler.MetropolisLocal(machine=ma)

    op = nk.optimizer.Sgd(ma, learning_rate=0.1)

    vmc = nk.Vmc(hamiltonian=ha, sampler=sa, optimizer=op, n_samples=500)

    # Time the full optimization run
    st = time.time()
    vmc.run(n_iter, callback=callbacks)
    runtime = time.time() - st
    return vmc.step_count, runtime
Example #8
def _setup_vmc(lsq_solver=None):
    L = 4
    g = nk.graph.Hypercube(length=L, n_dim=1)
    hi = nk.hilbert.Spin(s=0.5, N=g.n_nodes)

    ma = nk.machine.RbmSpin(hilbert=hi, alpha=1)
    ma.init_random_parameters(sigma=0.01)

    ha = nk.operator.Ising(hi, graph=g, h=1.0)
    sa = nk.sampler.ExactSampler(machine=ma, sample_size=16)
    op = nk.optimizer.Sgd(ma, learning_rate=0.05)

    # Add custom observable
    X = [[0, 1], [1, 0]]
    sx = nk.operator.LocalOperator(hi, [X] * L, [[i] for i in range(L)])

    sr = nk.optimizer.SR(ma, use_iterative=False, lsq_solver=lsq_solver)
    driver = nk.Vmc(ha, sa, op, 1000, sr=sr)

    return ha, sx, ma, sa, driver
Example #9
def run_vmc(steps, step_size, diag_shift, n_samples):
    opt = jaxopt.sgd(step_size)
    # opt = nk.optimizer.Sgd(step_size)

    sr = nk.optimizer.SR(lsq_solver="BDCSVD", diag_shift=diag_shift)
    sr.store_rank_enabled = False  # not supported by BDCSVD
    sr.store_covariance_matrix_enabled = True

    vmc = nk.Vmc(
        hamiltonian=ha,
        sampler=sa,
        optimizer=opt,
        n_samples=n_samples,
        n_discard=min(n_samples // 10, 200),
        sr=sr,
    )

    if mpi_rank == 0:
        print(vmc.info())
        print(HEADER_STRING)

    for step in vmc.iter(steps, 1):
        output(vmc, step)
Example #10
def _setup_vmc(dtype=np.float32, sr=True):
    L = 4
    g = nk.graph.Hypercube(length=L, n_dim=1)
    hi = nk.hilbert.Spin(s=0.5, N=g.n_nodes)

    ma = nk.models.RBM(alpha=1, dtype=dtype)
    sa = nk.sampler.ExactSampler(hilbert=hi, n_chains=16)

    vs = nk.variational.MCState(sa, ma, n_samples=1000, seed=SEED)

    ha = nk.operator.Ising(hi, graph=g, h=1.0)
    op = nk.optimizer.Sgd(learning_rate=0.05)

    # Add custom observable
    X = [[0, 1], [1, 0]]
    sx = nk.operator.LocalOperator(hi, [X] * L, [[i] for i in range(L)])

    if sr:
        sr_config = nk.optimizer.SR()
    else:
        sr_config = None
    driver = nk.Vmc(ha, op, variational_state=vs, sr=sr_config)

    return ha, sx, vs, sa, driver
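A hypothetical follow-up, assuming NetKet v3's MCState.expect API for evaluating the custom observable with the state set up above:

ha, sx, vs, sa, driver = _setup_vmc(sr=False)
driver.run(5)
print(vs.expect(sx))  # Monte Carlo estimate of the summed sigma_x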
Example #11
def load(L=__L__,
         alpha=__alpha__,
         sr=None,
         dataname=None,
         path='run',
         machine_name='JaxRBM',
         sampler='Local',
         hamiltonian_name='transformed_Heisenberg',
         n_samples=10000,
         n_iterations=20):
    """Method to load a pretrained machine and continue the training.

        A hamiltonian and sampler can be chosen. The machine is defined and trained for the hamiltonian.

            Args:
                L (int) : The number of sites of the lattice
                alpha (int) : A factor to define the size of different machines
                sr (float) : The diagonal shift for the stochastic reconfiguration method. If it is None, stochastic reconfiguration is not used
                dataname (str) : The dataname. If None, an automatic dataname is chosen
                path (str) : The directory where the results are saved. If None, the directory is 'run'
                machine_name (str) : A string to choose the machine. Possible inputs: see get_machine in my_machines.py
                sampler (str) : A string to choose the sampler. Recommended: 'Local' (works with every machine)
                hamiltonian_name (str) : A string to choose the hamiltonian. Possible inputs: see get_hamiltonian in my_models.py
                n_samples (int) : The number of samples used in every iteration step
                n_iterations (int) : The number of iterations (training steps)

                """
    if dataname is None:
        dataname = ''.join(('L', str(L)))
    dataname = functions.create_path(dataname, path=path)
    ha, hi, g = models.get_hamiltonian(hamiltonian_name, L)
    print('uses', hamiltonian_name, 'hamiltonian')
    sys.stdout.flush()
    print('load the machine: ', dataname)
    generate_machine = machines.get_machine(machine_name)
    ma, op, sa, machine_name = generate_machine(hilbert=hi,
                                                hamiltonian=ha,
                                                alpha=alpha)
    ma.load(''.join((dataname, '.wf')))
    op, sa = machines.load_machine(machine=ma,
                                   hamiltonian=ha,
                                   optimizer='Adamax',
                                   lr=0.001,
                                   sampler=sampler)
    #observables = functions.get_operator(hilbert=hi, L=L, operator='FerroCorr', symmetric=True)
    #observables = {**functions.get_operator(hilbert=hi, L=L, operator='FerroCorr', symmetric=False), **functions.get_operator(hilbert=hi, L=L, operator='FerroCorr', symmetric=True)}
    if sr is None:
        gs2 = nk.Vmc(hamiltonian=ha,
                     sampler=sa,
                     optimizer=op,
                     n_samples=n_samples)  #, n_discard=5000)
    else:
        sr = nk.optimizer.SR(ma, diag_shift=sr)
        gs2 = nk.Vmc(hamiltonian=ha,
                     sampler=sa,
                     optimizer=op,
                     n_samples=n_samples,
                     sr=sr)  #, n_discard=5000)

    functions.create_machinefile(machine_name, L, alpha, dataname, sr)
    start = time.time()
    #gs2.run(n_iter=n_iterations, out=''.join((dataname, '_load')), obs=observables, write_every=4, save_params_every=4)
    gs2.run(n_iter=n_iterations,
            out=dataname,
            write_every=10,
            save_params_every=10)
    end = time.time()
    with open(''.join((dataname, '.time')), 'a') as f:
        f.write(str(end - start))
    print('Time', end - start)
    sys.stdout.flush()
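A hypothetical call that resumes training from a previously saved machine (all argument values are placeholders):

load(L=16, alpha=2, sr=0.01, dataname='L16',
     machine_name='JaxRBM', sampler='Local',
     n_samples=5000, n_iterations=50)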
Example #12
def test_deprecated_vmc_name():
    ha, sx, ma, sa, driver = _setup_vmc()
    op = nk.optimizer.Sgd(learning_rate=0.05)

    with pytest.warns(FutureWarning):
        driver = nk.Vmc(ha, op, sa, nk.models.RBM(), n_samples=1000, seed=SEED)
Example #13
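    # (fragment of a larger script: `g`, `hi`, `ha`, `N`, `alpha_sym`, `sgd_lr`,
    # `args`, `pyfile`, `str_params`, `logfile` and `output` are defined earlier)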
    ######################################
    # Initialize RBM with lattice symmetry
    ######################################

    n_autom = len(g.automorphisms())
    ma = nk.machine.RbmSpinSymm(hilbert=hi,
                                alpha=alpha_sym * n_autom // N)  # Machine
    ma.init_random_parameters(seed=1234, sigma=0.01)
    assert ma._ws.shape == (N, alpha_sym)

    opt = nk.optimizer.Sgd(ma, learning_rate=sgd_lr)  # Optimizer
    sa = nk.sampler.MetropolisLocal(machine=ma)  # Metropolis Local Sampling
    sr = nk.optimizer.SR(ma, diag_shift=0.1)  # Stochastic Reconfiguration
    gs = nk.Vmc(  # VMC object
        hamiltonian=ha,
        sampler=sa,
        optimizer=opt,
        n_samples=1000,
        sr=sr)

    ################################################
    # Optimize RBM parameters to obtain ground state
    ################################################

    if args.direct_calc:
        assert os.path.isfile("data/{}_opt_{}.log".format(pyfile, str_params))
        output(">>>> RBMSym optimization skipped\n", logfile)
    else:
        output(">>>> RBMSym optimization\n", logfile)

        start = time.time()
        gs.run(out="data/{}_opt_{}".format(pyfile, str_params),
Example #14
# 1D Lattice
g = nk.graph.Hypercube(length=20, n_dim=1, pbc=True)

# Hilbert space of spins on the graph
hi = nk.hilbert.Spin(s=0.5, graph=g)

# Ising spin hamiltonian
ha = nk.operator.Ising(h=1.0, hilbert=hi)

# RBM Spin Machine
ma = nk.machine.RbmSpin(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)

# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(ma, n_chains=32)

# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.1)

# Stochastic reconfiguration
sr = nk.optimizer.SR(diag_shift=0.1)
gs = nk.Vmc(hamiltonian=ha,
            sampler=sa,
            optimizer=op,
            n_samples=10,
            sr=sr,
            n_discard=5)

gs.run(n_iter=3)
Example #15
# Hilbert space of spins on the graph
# with total Sz equal to 0
hi = nk.hilbert.Spin(s=1 / 2, N=g.n_nodes, total_sz=0)

# Heisenberg hamiltonian
ha = nk.operator.Heisenberg(hilbert=hi)

# Symmetric RBM Spin Machine
ma = nk.machine.JastrowSymm(hilbert=hi, automorphisms=g, dtype=float)
ma.init_random_parameters(seed=1234, sigma=0.01)

# Metropolis Exchange Sampling
# Notice that this sampler exchanges two neighboring sites
# thus preserving the total magnetization
sa = nk.sampler.MetropolisExchange(machine=ma)

# Optimizer
op = nk.optimizer.Sgd(ma, learning_rate=0.05)

# Stochastic reconfiguration
gs = nk.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=1000,
    sr=nk.optimizer.SR(diag_shift=0.1, lsq_solver="QR"),
)

gs.run(out="test", n_iter=300)
Example #16
# 1D Lattice
L = 20
g = nk.graph.Hypercube(length=L, n_dim=1, pbc=True)

# Hilbert space of spins on the graph
hi = nk.hilbert.Spin(s=0.5, graph=g)

ha = nk.operator.Ising(h=1.0, hilbert=hi)

alpha = 1
ma = nk.machine.JaxRbm(hi, alpha, dtype=float)
ma.init_random_parameters(seed=1232)

# Jax Sampler
sa = nk.sampler.MetropolisLocal(machine=ma, n_chains=2)

# Using Sgd
op = nk.optimizer.Sgd(ma, learning_rate=0.1)


# Create the optimization driver
gs = nk.Vmc(
    hamiltonian=ha, sampler=sa, optimizer=op, n_samples=1000, sr=None, n_discard=None
)

# The first iteration is slower because of start-up jit times
gs.run(out="test", n_iter=2)

gs.run(out="test", n_iter=300)
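To measure the warm-up cost mentioned in the comment above, the two runs can be wrapped in timers (a sketch using only the standard library; numbers will vary):

import time

st = time.time()
gs.run(out="test", n_iter=2)    # pays the one-off jit compilation cost
print("warm-up run:", time.time() - st, "s")

st = time.time()
gs.run(out="test", n_iter=300)  # subsequent iterations are much faster
print("main run:", time.time() - st, "s")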
Example #17
# Spin based Hilbert Space
hi = nk.hilbert.Spin(s=0.5, total_sz=0.0, graph=g)

# Custom Hamiltonian operator
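# (`mats`, `sites` and the graph `g` are assumed to be defined earlier in the
# original script)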
ha = nk.operator.LocalOperator(hi)
for mat, site in zip(mats, sites):
    ha += nk.operator.LocalOperator(hi, mat, site)

# Restricted Boltzmann Machine
ma = nk.machine.RbmSpin(hi, alpha=1, symmetry=True)
ma.init_random_parameters(seed=1234, sigma=0.01)

# Exchange Sampler randomly exchange up to next-to-nearest neighbours
sa = nk.sampler.MetropolisExchange(machine=ma, n_chains=16, d_max=2)

# Optimizer
opt = nk.optimizer.Sgd(ma, learning_rate=0.02)

# Stochastic reconfiguration
sr = nk.optimizer.SR(ma, diag_shift=0.1)

# Variational Monte Carlo
gs = nk.Vmc(hamiltonian=ha,
            sampler=sa,
            optimizer=opt,
            sr=sr,
            n_samples=4000,
            n_discard=5)

gs.run(n_iter=300, out="test")
Example #18
def Dimer_RBM(h, V, length, alpha, n_iter, n_samples, n_chains, n_discard,
              sweep_size):

    kernel = 1
    n_iter = 300
    # sweep_size = 200
    decay_factor = 'sigmoid decay'
    n_jobs = 12
    n_discard = 300

    name = 'h={}V={}l={}'.format(h, V, length)

    g = nk.graph.Graph(nodes=[i for i in range(length[0] * length[1] * 2)])
    hi = nk.hilbert.Spin(s=0.5, graph=g)

    ham = f.dimer_hamiltonian(V=V, h=h, length=np.array(length))
    op_transition1 = f.dimer_flip1(length=np.array(length))

    hex_ = nk.machine.new_hex(np.array(length))

    ma = nk.machine.RbmDimer(hi,
                             hex_,
                             alpha=alpha,
                             symmetry=True,
                             use_hidden_bias=False,
                             use_visible_bias=False,
                             dtype=float,
                             reverse=True)
    ma.init_random_parameters(seed=1234)

    sa_mul = nk.sampler.DimerMetropolisLocal_multi(machine=ma,
                                                   op=op_transition1,
                                                   length=length,
                                                   n_chains=n_chains,
                                                   sweep_size=sweep_size,
                                                   kernel=1,
                                                   n_jobs=n_jobs)

    sr = nk.optimizer.SR(ma, diag_shift=0)
    opt = nk.optimizer.Sgd(ma,
                           learning_rate=0.05,
                           decay_factor=decay_factor,
                           N=n_iter)

    gs = nk.Vmc(
        hamiltonian=ham,
        sampler=sa_mul,
        optimizer=opt,
        n_samples=n_samples,
        sr=sr,
        n_discard=n_discard,
    )

    gs.run(n_iter=n_iter, out=parentdir + '/log/' + name)

    # slight modification with larger n_samples and larger sweep_size

    sweep_size = sweep_size * 3
    n_samples = n_samples * 3
    n_iter = 100
    n_discard = 600

    sa_mul = nk.sampler.DimerMetropolisLocal_multi(machine=ma,
                                                   op=op_transition1,
                                                   length=length,
                                                   n_chains=n_chains,
                                                   sweep_size=sweep_size,
                                                   kernel=1,
                                                   n_jobs=n_jobs)

    sr = nk.optimizer.SR(ma, diag_shift=0)
    opt = nk.optimizer.Sgd(ma, learning_rate=0.01, decay_factor=1, N=n_iter)

    gs = nk.Vmc(
        hamiltonian=ham,
        sampler=sa_mul,
        optimizer=opt,
        n_samples=n_samples,
        sr=sr,
        n_discard=n_discard,
    )

    gs.run(n_iter=n_iter, out=parentdir + '/log/' + name + '2')

    ma.save(parentdir + '/save/ma/' + name)
Example #19
# Ising spin hamiltonian
ha = nk.operator.Ising(h=1.0, hilbert=hi)

input_size = hi.size
alpha = 1

model = torch.nn.Sequential(
    torch.nn.Linear(input_size, alpha * input_size),
    torch.nn.ReLU(),
    torch.nn.Linear(alpha * input_size, 2),
    torch.nn.ReLU(),
)

ma = nk.machine.Torch(model, hilbert=hi)

ma.parameters = 0.1 * (np.random.randn(ma.n_par))

# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma, n_chains=8)

# Optimizer
op = nk.optimizer.Sgd(0.1)

# Stochastic reconfiguration
sr = nk.optimizer.SR(diag_shift=0.1, use_iterative=True)

# Driver
gs = nk.Vmc(hamiltonian=ha, sampler=sa, optimizer=op, n_samples=500, sr=sr)

gs.run(n_iter=300, out="test")
Example #20
g = nk.graph.Hypercube(length=8, n_dim=1, pbc=True)

# Boson Hilbert Space
hi = nk.hilbert.Boson(N=g.n_nodes, n_max=3, n_bosons=8)

# Bose Hubbard Hamiltonian
ha = nk.operator.BoseHubbard(hilbert=hi, graph=g, U=4.0)

# RBM Machine with one-hot encoding, real parameters, and symmetries
ma = nk.machine.RbmMultiVal(hilbert=hi, alpha=1, dtype=float, automorphisms=g)
ma.init_random_parameters(seed=1234, sigma=0.01)

# Sampler using Hamiltonian moves, thus preserving the total number of particles
sa = nk.sampler.MetropolisHamiltonian(machine=ma,
                                      hamiltonian=ha,
                                      batch_size=16)

# Stochastic gradient descent optimization
op = nk.optimizer.Sgd(ma, 0.05)

# Variational Monte Carlo
sr = nk.optimizer.SR(ma, diag_shift=0.1)
vmc = nk.Vmc(hamiltonian=ha,
             sampler=sa,
             optimizer=op,
             n_samples=4000,
             n_discard=0,
             sr=sr)

vmc.run(n_iter=300, out="test")
Example #21
# 2D Lattice
g = nk.graph.Hypercube(length=5, n_dim=2, pbc=True)

# Hilbert space of spins on the graph
hi = nk.hilbert.Spin(s=0.5, graph=g)

# Ising spin hamiltonian near the critical point
ha = nk.operator.Ising(h=3.0, hilbert=hi)

# RBM Spin Machine
ma = nk.machine.RbmSpin(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)

# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma)

# Optimizer
op = nk.optimizer.Sgd(ma, learning_rate=0.1)

# Stochastic Reconfiguration
sr = nk.optimizer.SR(ma, diag_shift=0.1)

# Create the optimization driver
gs = nk.Vmc(hamiltonian=ha, sampler=sa, optimizer=op, sr=sr, n_samples=1000)

# Create a JSON output file, and overwrite if file exists
logger = nk.logging.JsonLog("test", "w")

# Run the optimization
gs.run(n_iter=1000, out=logger)
Example #22
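# (`hi` and `L` are assumed to be defined earlier in the original script)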
# Ising spin hamiltonian
ha = nk.operator.Ising(h=1.0, hilbert=hi)

# RBM Spin Machine
ma = nk.machine.RbmSpinReal(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)

# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma)

# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.1)

# Stochastic reconfiguration
gs = nk.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=1000,
    sr=nk.optimizer.SR(ma, diag_shift=0.1),
)

# Adding an observable
# The sum of sigma_x on all sites
X = [[0, 1], [1, 0]]
sx = nk.operator.LocalOperator(hi, [X] * L, [[i] for i in range(L)])
obs = {"SigmaX": sx}

gs.run(n_iter=300, out="test", obs=obs)
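A post-processing sketch for the JSON log written by run(); the exact field layout differs across NetKet versions, so the keys below are assumptions:

import json

with open("test.log") as f:
    data = json.load(f)

# Assumed layout: each logged quantity maps to per-iteration lists
print("final energy:", data["Energy"]["Mean"][-1])
print("final <SigmaX>:", data["SigmaX"]["Mean"][-1])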
Example #23
def run(L=__L__,
        alpha=__alpha__,
        sr=None,
        dataname=None,
        path='run',
        machine_name='JaxRBM',
        sampler='Local',
        hamiltonian_name='transformed_Heisenberg',
        n_samples=__number_samples__,
        n_iterations=__number_iterations__):
    """Method to train a machine.

        A hamiltonian and sampler can be chosen. The machine is defined and trained for the hamiltonian.

            Args:
                L (int) : The number of sites of the lattice.
                alpha (int) : A factor to define the size of different machines.
                sr (float) : The diagonal shift for the stochastic reconfiguration method. If it is None, stochastic reconfiguration is not used.
                dataname (str) : The dataname. If None, an automatic dataname is chosen
                path (str) : The directory where the results are saved. If None, the directory is 'run'
                machine_name (str) : A string to choose the machine. Possible inputs: see get_machine in my_machines.py
                sampler (str) : A string to choose the sampler. Recommended: 'Local' (works with every machine)
                hamiltonian_name (str) : A string to choose the hamiltonian. Possible inputs: see get_hamiltonian in my_models.py
                n_samples (int) : The number of samples used in every iteration step
                n_iterations (int) : The number of iterations (training steps)
                                                    """
    ha, hi, g = models.get_hamiltonian(hamiltonian_name, L)
    print('uses', hamiltonian_name, 'hamiltonian')
    sys.stdout.flush()
    generate_machine = machines.get_machine(machine_name)
    ma, op, sa, machine_name = generate_machine(hilbert=hi,
                                                hamiltonian=ha,
                                                alpha=alpha,
                                                optimizer='Adamax',
                                                lr=0.005,
                                                sampler=sampler)

    if sr is None:
        gs = nk.Vmc(hamiltonian=ha,
                    sampler=sa,
                    optimizer=op,
                    n_samples=n_samples)
    else:
        sr = nk.optimizer.SR(ma, diag_shift=sr)
        gs = nk.Vmc(hamiltonian=ha,
                    sampler=sa,
                    optimizer=op,
                    n_samples=n_samples,
                    sr=sr)

    if dataname is None:
        dataname = ''.join(('L', str(L)))
    dataname = functions.create_path(dataname, path=path)
    print('')
    functions.create_machinefile(machine_name, L, alpha, dataname, sr)
    start = time.time()
    gs.run(n_iter=int(n_iterations), out=dataname)
    end = time.time()
    with open(''.join((dataname, '.time')), 'w') as f:
        f.write(str(end - start))
    print('Time', end - start)
    sys.stdout.flush()