Example 1
def test_vmc_sr_legacy_api():
    ha, sx, _, _, driver = _setup_vmc(sr=True)
    op = driver.optimizer
    vs = driver.state
    sr_config = driver.preconditioner

    with pytest.warns(FutureWarning):
        driver = nk.VMC(
            ha,
            op,
            variational_state=vs,
            sr=sr_config,
        )

    with pytest.warns(FutureWarning):
        driver = nk.VMC(
            ha,
            op,
            variational_state=vs,
            preconditioner=sr_config,
            sr_restart=True,
        )

    with pytest.raises(ValueError):
        driver = nk.VMC(ha,
                        op,
                        variational_state=vs,
                        sr=sr_config,
                        preconditioner=sr_config)
Example 2
def test_vmc_construction_vstate():
    ha, sx, _, sa, driver = _setup_vmc()

    op = nk.optimizer.Sgd(learning_rate=0.05)

    driver = nk.VMC(ha, op, sa, nk.models.RBM(), n_samples=1000, seed=SEED)

    driver.run(1)

    assert driver.step_count == 1

    with raises(TypeError):
        ha2 = nk.operator.LocalOperator(ha.hilbert * ha.hilbert)
        driver = nk.VMC(ha2, op, variational_state=driver.state)
Example 3
def test_RBMSymm(use_hidden_bias, use_visible_bias, symmetries):
    g, hi, perms = _setup_symm(symmetries, N=8)

    ma = nk.models.RBMSymm(
        symmetries=perms,
        alpha=4,
        use_visible_bias=use_visible_bias,
        use_hidden_bias=use_hidden_bias,
        hidden_bias_init=nk.nn.initializers.uniform(),
        visible_bias_init=nk.nn.initializers.uniform(),
    )
    pars = ma.init(nk.jax.PRNGKey(), hi.random_state(nk.jax.PRNGKey()))

    print(pars)

    v = hi.random_state(jax.random.PRNGKey(1), 3)
    vals = [ma.apply(pars, v[..., p]) for p in np.asarray(perms)]

    for val in vals:
        assert jnp.allclose(val, vals[0])

    vmc = nk.VMC(
        nk.operator.Ising(hi, g, h=1.0),
        nk.optimizer.Sgd(0.1),
        nk.sampler.MetropolisLocal(hi),
        ma,
    )
    vmc.advance(1)
Example 4
def test_gcnn(mode, complex_output):
    lattice = nk.graph.Chain
    symmetries = "trans"
    parity = True
    g, hi, perms = _setup_symm(symmetries, N=3, lattice=lattice)

    ma = nk.models.GCNN(
        symmetries=perms,
        mode=mode,
        shape=tuple(g.extent),
        layers=2,
        features=2,
        parity=parity,
        bias_init=uniform(),
        complex_output=complex_output,
    )

    vmc = nk.VMC(
        nk.operator.Ising(hi, g, h=1.0),
        nk.optimizer.Sgd(0.1),
        nk.sampler.MetropolisLocal(hi, n_chains=2, n_sweeps=2),
        ma,
        n_samples=8,
    )
    vmc.advance(1)
Example 5
def test_gcnn(parity, symmetries, lattice, mode):
    g, hi, perms = _setup_symm(symmetries, N=3, lattice=lattice)

    ma = nk.models.GCNN(
        symmetries=perms,
        mode=mode,
        shape=tuple(g.extent),
        layers=2,
        features=2,
        parity=parity,
        bias_init=nk.nn.initializers.uniform(),
    )

    pars = ma.init(nk.jax.PRNGKey(), hi.random_state(nk.jax.PRNGKey(), 1))

    v = hi.random_state(jax.random.PRNGKey(0), 3)
    vals = [ma.apply(pars, v[..., p]) for p in np.asarray(perms)]

    for val in vals:
        assert jnp.allclose(val, vals[0])

    vmc = nk.VMC(
        nk.operator.Ising(hi, g, h=1.0),
        nk.optimizer.Sgd(0.1),
        nk.sampler.MetropolisLocal(hi),
        ma,
    )
    vmc.advance(1)
Example 6
def _vmc(n_iter=20):
    hi = nk.hilbert.Spin(s=0.5)**L

    ma = nk.models.RBM(alpha=1)

    ha = nk.operator.Ising(hi, nk.graph.Hypercube(length=L, n_dim=1), h=1.0)
    sa = nk.sampler.MetropolisLocal(hi)
    vs = nk.vqs.MCState(sa, ma, n_samples=500, seed=SEED)

    op = nk.optimizer.Sgd(learning_rate=0.1)

    return nk.VMC(hamiltonian=ha, variational_state=vs, optimizer=op)
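
A minimal usage sketch for this helper (hedged: L and SEED are module-level constants elided from the snippet; nk.logging.RuntimeLog is NetKet's in-memory logger, and "Energy" is the VMC driver's default log key):

driver = _vmc()
log = nk.logging.RuntimeLog()
driver.run(n_iter=20, out=log)
# Inspect the accumulated energy statistics
print(log.data["Energy"])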
Example 7
def test_AR_VMC(partial_models, hilbert, dtype):
    model1 = partial_models[0](hilbert, dtype)
    model2 = partial_models[1](hilbert, dtype)

    sampler1 = nk.sampler.ARDirectSampler(hilbert, n_chains=3)
    vstate1 = nk.vqs.MCState(sampler1,
                             model1,
                             n_samples=6,
                             seed=123,
                             sampler_seed=456)
    assert vstate1.n_discard_per_chain == 0
    samples1 = vstate1.sample()

    graph = nk.graph.Hypercube(length=hilbert.size, n_dim=1)
    H = nk.operator.Ising(hilbert=hilbert, graph=graph, h=1)
    optimizer = optax.adam(learning_rate=1e-3)
    vmc1 = nk.VMC(H, optimizer, variational_state=vstate1)
    vmc1.run(n_iter=3)
    samples_trained1 = vstate1.sample()

    sampler2 = nk.sampler.ARDirectSampler(hilbert, n_chains=3)
    vstate2 = nk.vqs.MCState(sampler2,
                             model2,
                             n_samples=6,
                             seed=123,
                             sampler_seed=456)
    samples2 = vstate2.sample()

    # Samples from FastARNN* should be the same as those from ARNN*
    np.testing.assert_allclose(samples2, samples1)

    vmc2 = nk.VMC(H, optimizer, variational_state=vstate2)
    vmc2.run(n_iter=3)
    samples_trained2 = vstate2.sample()

    # Samples from FastARNN* after training should be the same as those from ARNN*
    np.testing.assert_allclose(samples_trained2, samples_trained1)
Example 8
def test_Jastrow(dtype):
    N = 8
    hi = nk.hilbert.Spin(1 / 2, N)
    g = nk.graph.Chain(N)

    ma = nk.models.Jastrow(dtype=dtype)
    pars = ma.init(nk.jax.PRNGKey(), hi.random_state(1))

    vmc = nk.VMC(
        nk.operator.Ising(hi, g, h=1.0),
        nk.optimizer.Sgd(0.1),
        nk.sampler.MetropolisLocal(hi),
        ma,
    )
    vmc.advance(1)
Example 9
def test_mps(diag):
    L = 6
    g = nk.graph.Hypercube(length=L, n_dim=1)
    hi = nk.hilbert.Spin(s=0.5, N=g.n_nodes)

    ma = nk.models.MPSPeriodic(hilbert=hi, graph=g, bond_dim=2, diag=diag)
    sa = nk.sampler.MetropolisLocal(hilbert=hi, n_chains=16)

    vs = nk.vqs.MCState(sa, ma)

    ha = nk.operator.Ising(hi, graph=g, h=1.0)
    op = nk.optimizer.Sgd(learning_rate=0.05)

    driver = nk.VMC(ha, op, variational_state=vs)

    driver.run(1)
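
A hedged follow-up sketch: MCState.expect() is public NetKet API and returns Monte Carlo statistics, so the energy can be sanity-checked after the step (the printed attributes belong to nk.stats.Stats):

# Energy estimate and statistical error after one optimization step
stats = vs.expect(ha)
print(stats.mean, stats.error_of_mean)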
Example 10
def test_AR_VMC(s):
    L = 4

    graph = nk.graph.Hypercube(length=L, n_dim=1)
    hilbert = nk.hilbert.Spin(s=s, N=L)
    model = nk.models.ARNNDense(hilbert=hilbert, layers=3, features=5)
    sampler = nk.sampler.ARDirectSampler(hilbert, n_chains=3)

    vstate = nk.vqs.MCState(sampler, model, n_samples=6)
    assert vstate.n_discard_per_chain == 0
    vstate.sample()

    H = nk.operator.Ising(hilbert=hilbert, graph=graph, h=1)
    optimizer = optax.adam(learning_rate=1e-3)
    vmc = nk.VMC(H, optimizer, variational_state=vstate)
    vmc.run(n_iter=3)
Example 11
def test_RBMMultiVal(use_hidden_bias, use_visible_bias):
    N = 8
    M = 3
    hi = nk.hilbert.Fock(M, N)
    g = nk.graph.Chain(N)

    ma = nk.models.RBMMultiVal(
        alpha=2,
        n_classes=M + 1,
        use_visible_bias=use_visible_bias,
        use_hidden_bias=use_hidden_bias,
        hidden_bias_init=nk.nn.initializers.uniform(),
        visible_bias_init=nk.nn.initializers.uniform(),
    )
    _ = ma.init(nk.jax.PRNGKey(), hi.random_state(nk.jax.PRNGKey(), 1))

    vmc = nk.VMC(
        nk.operator.BoseHubbard(hi, g, U=1.0),
        nk.optimizer.Sgd(0.1),
        nk.sampler.MetropolisLocal(hi),
        ma,
    )
    vmc.advance(1)
Example 12
def _setup_vmc(dtype=np.float32, sr=True):
    L = 4
    g = nk.graph.Hypercube(length=L, n_dim=1)
    hi = nk.hilbert.Spin(s=0.5, N=g.n_nodes)

    ma = nk.models.RBM(alpha=1, dtype=dtype)
    sa = nk.sampler.ExactSampler(hilbert=hi)

    vs = nk.vqs.MCState(sa, ma, n_samples=1000, seed=SEED)

    ha = nk.operator.Ising(hi, graph=g, h=1.0)
    op = nk.optimizer.Sgd(learning_rate=0.05)

    # Add custom observable
    X = [[0, 1], [1, 0]]
    sx = nk.operator.LocalOperator(hi, [X] * L, [[i] for i in range(L)])

    if sr:
        sr_config = nk.optimizer.SR()
    else:
        sr_config = None
    driver = nk.VMC(ha, op, variational_state=vs, preconditioner=sr_config)

    return ha, sx, vs, sa, driver
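
A short usage sketch for this fixture (hedged: the obs= dict of run() and driver.estimate() are standard NetKet driver API; the iteration count is illustrative):

ha, sx, vs, sa, driver = _setup_vmc(sr=True)
driver.run(n_iter=5, obs={"SigmaX": sx})  # log <sigma_x> alongside the energy
print(driver.estimate(sx))  # one-off Monte Carlo estimate of the observable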
Example 13
g = nk.graph.Hypercube(length=L, n_dim=1, pbc=True)

# Hilbert space of spins on the graph
hi = nk.hilbert.Spin(s=1 / 2, N=g.n_nodes)

# Ising spin hamiltonian
ha = nk.operator.Ising(hilbert=hi, graph=g, h=0.0)

# Jastrow machine
ma = nk.models.Jastrow(dtype=np.float64)

# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(hi, n_chains=16)

# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.05)
sr = nk.optimizer.SR(diag_shift=0.01)

# Variational monte carlo driver
gs = nk.VMC(ha, op, sa, ma, n_samples=8000, preconditioner=sr)

# Run the optimization for 300 iterations
gs.run(
    n_iter=300,
    out="test",
    # stop if variance is essentially zero (= reached eigenstate)
    callback=nk.callbacks.EarlyStopping(
        monitor="variance", baseline=1e-12, patience=np.infty
    ),
)
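
Because out="test" writes a JSON log to test.log, the optimization history can be read back afterwards. A hedged sketch; the exact field layout depends on the NetKet version and the parameter dtype:

import json

with open("test.log") as f:
    data = json.load(f)
print(data["Energy"]["Mean"][-1])  # last recorded energy mean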
Example 14
# 1D Lattice
L = 20
g = nk.graph.Hypercube(length=L, n_dim=1, pbc=True)

# Hilbert space of spins on the graph
hi = nk.hilbert.Spin(s=1 / 2, N=g.n_nodes)

# Ising spin hamiltonian
ha = nk.operator.Ising(hilbert=hi, graph=g, h=1.0)

# RBM Spin Machine
ma = nk.models.RBM(alpha=1, dtype=float)

# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(hi, n_chains=16)

# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.1)

# SR
sr = nk.optimizer.SR(diag_shift=0.01)

# Variational state
vs = nk.vqs.MCState(sa, ma, n_samples=1000, n_discard_per_chain=100)

# Variational monte carlo driver with a variational state
gs = nk.VMC(ha, op, variational_state=vs, preconditioner=sr)

# Run the optimization for 300 iterations
gs.run(n_iter=300, out=None)
Example 15
import netket as nk

# 1D Lattice
L = 20

g = nk.graph.Hypercube(length=L, n_dim=1, pbc=True)

# Hilbert space of spins on the graph
hi = nk.hilbert.Spin(s=1 / 2, N=g.n_nodes)

# Ising spin hamiltonian
ha = nk.operator.Ising(hilbert=hi, graph=g, h=1.0)

# RBM Spin Machine
ma = nk.models.RBM(alpha=1, use_visible_bias=True, dtype=float)

# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(hi, n_chains=16)

# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.1)

# Variational monte carlo driver
gs = nk.VMC(ha, op, sa, ma, n_samples=1000)

# Run the optimization for 300 iterations
gs.run(n_iter=300, out="test")
Example 16
ma = nk.models.RBM(alpha=1, use_visible_bias=True, dtype=complex)

# Metropolis Local Sampling
sa = nk.sampler.MetropolisHamiltonian(hi, ha, n_chains=16)

# Variational state
vs = nk.vqs.MCState(sa, ma, n_samples=1024, n_discard_per_chain=16)

# Optimizer
op = nk.optimizer.Sgd(0.01)
sr = nk.optimizer.SR(diag_shift=1e-4)

# Variational monte carlo driver
gs = nk.VMC(ha, op, variational_state=vs)

# Create observable
Sx = sum([nk.operator.spin.sigmax(hi, i) for i in range(L)])

# Run the optimization for 300 iterations to determine the ground state, used as
# initial state of the time-evolution
gs.run(n_iter=300, out="example_ising1d_GS", obs={"Sx": Sx})

# Create integrator for time propagation
integrator = nkx.dynamics.RK23(dt=0.01, adaptive=True, rtol=1e-3, atol=1e-3)
print(integrator)

# Quenched hamiltonian: this has a different transverse field than `ha`
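
The snippet is truncated at this point. A hedged sketch of a plausible continuation, assuming nkx is netket.experimental (its dynamics module is already used above) and an illustrative quench field h=0.5 that is not taken from the source:

ha1 = nk.operator.Ising(hilbert=hi, graph=g, h=0.5)

# Time-evolve the optimized ground state under the quenched hamiltonian
te = nkx.TDVP(ha1, variational_state=vs, integrator=integrator)
te.run(T=1.0, obs={"Sx": Sx})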
Example 17
# RBM Spin Machine
ma = nk.models.RBMSymm(
    symmetries=g.translations(),
    alpha=4,
    use_visible_bias=False,
    use_hidden_bias=True,
    dtype=float,
)

# Metropolis Local Sampling
sa = nk.sampler.MetropolisExchange(hi, graph=g, n_chains=16)

# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.01)
sr = nk.optimizer.SR(diag_shift=0.1)

# Variational monte carlo driver
gs = nk.VMC(ha,
            op,
            sa,
            ma,
            n_samples=1000,
            n_discard_per_chain=100,
            preconditioner=sr)

# Print parameter structure
print(f"# variational parameters: {gs.state.n_parameters}")

# Run the optimization for 300 iterations
gs.run(n_iter=300, out="test")
Example 18
def forward_fn(x):
    model = MyLinear(10)
    return jnp.squeeze(jnp.sum(model(x), axis=-1))


ma = hk.transform(forward_fn)

# 1D Lattice
L = 20

g = nk.graph.Hypercube(length=L, n_dim=1, pbc=True)

# Hilbert space of spins on the graph
hi = nk.hilbert.Spin(s=1 / 2, N=g.n_nodes)

# Ising spin hamiltonian
ha = nk.operator.Ising(hilbert=hi, graph=g, h=1.0)

# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(hi, n_chains=16)

# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.1)

# Variational monte carlo driver
gs = nk.VMC(ha, op, sa, ma, n_samples=1000, n_discard_per_chain=50)

# Run the optimization for 300 iterations
gs.run(n_iter=300, out="test")