Example 1
import numpy as np
import time

from TNR.Models.isingModel import IsingSpinGlass
from TNR.Contractors.mergeContractor import mergeContractor
from TNR.Contractors.heuristics import loopHeuristic as heuristic

from TNR.Utilities.logger import makeLogger
from TNR import config
logger = makeLogger(__name__, config.levels['generic'])


def isingFreeEnergy(nX, J, k, accuracy):
    '''
    Build the Ising spin glass network at linear size nX and contract it,
    returning the free energy per site.
    '''
    n = IsingSpinGlass(nX, J, k, accuracy)
    n = mergeContractor(n,
                        accuracy,
                        heuristic,
                        optimize=True,
                        merge=False,
                        plot=False)
    return n.array[1] / nX


J = 1
accuracy = 1e-3
size = list(range(4, 10))
k = 1.5

res = []

for s in size:
    # Plausible completion of the truncated driver: time each contraction
    # (the time import above suggests the original measured runtime) and
    # record the free energy per site at every linear size.
    start = time.time()
    f = isingFreeEnergy(s, J, k, accuracy)
    res.append((s, f, time.time() - start))
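
A minimal way to report the sweep, assuming the loop completion above (the (size, value, time) tuple layout is an assumption, not part of the original listing):

for s, f, dt in res:
    print('nX = %d: free energy per site = %g (%.2f s)' % (s, f, dt))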
Example 2
import numpy as np
import operator

from TNR.Network.network import Network
from TNR.Network.node import Node
from TNR.Network.bucket import Bucket
from TNR.Network.link import Link
from TNR.Tensor.arrayTensor import ArrayTensor
from TNR.Utilities.svd import entropy, splitArray

import sys
sys.setrecursionlimit(10000)

from TNR.Utilities.logger import makeLogger
from TNR import config
logger = makeLogger(__name__, config.levels['treeNetwork'])


class TreeNetwork(Network):
    '''
    A treeNetwork is a special case of a Network in which the Network being represented
    contains no cycles. This allows matrix elements of a treeNetwork to be efficiently
    evaluated.

    As the only quantities which matter are the matrix elements, the treeNetwork may
    refactor itself through singular value decomposition (SVD) to minimize memory use, and
    so no assumptions should be made about the Nodes in this object, just the external
    Buckets.

    Internally, all Nodes of a treeNetwork hold Tensors of rank at most 3;
    SVD factoring is used to enforce this.
    '''
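
The rank-3 invariant mentioned in the docstring is enforced by ordinary SVD splitting, which can be illustrated without the TNR API. A minimal numpy sketch of the underlying factorization (the linear algebra the class relies on, not the splitNode implementation itself):

import numpy as np

# A rank-4 tensor with indices (a, b, c, d).
T = np.random.randn(2, 3, 4, 5)

# Group (a, b) against (c, d) and factor the resulting matrix.
M = T.reshape(2 * 3, 4 * 5)
U, S, V = np.linalg.svd(M, full_matrices=False)

# Split the singular values between the factors; the shared index of
# size len(S) becomes the new internal bond of the tree.
A = (U * np.sqrt(S)).reshape(2, 3, len(S))           # rank-3 node
B = (np.sqrt(S)[:, None] * V).reshape(len(S), 4, 5)  # rank-3 node

# Contracting over the bond recovers the original tensor exactly.
assert np.allclose(np.einsum('abk,kcd->abcd', A, B), T)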
Example 3
import numpy as np
from numpy.linalg import svd
from scipy.linalg.interpolative import svd as svdI
from scipy.sparse.linalg import aslinearoperator
from scipy.sparse.linalg import LinearOperator
from scipy.sparse.linalg import svds
from itertools import combinations

from TNR.Utilities.arrays import permuteIndices
from TNR.Utilities.linalg import adjoint

from TNR.Utilities.logger import makeLogger
from TNR import config
logger = makeLogger(__name__, config.levels['svd'])

###################################
# Linear Operator and SVD Functions
###################################


def matrixProductLinearOperator(matrix1, matrix2):
    '''
    Return the product of two matrices, either densely or as a lazy
    LinearOperator, whichever is cheaper. We implement our own wrapper
    because the dot product of the standard LinearOperator class has an
    extremely slow type-checking stage that runs every time a product is
    evaluated.
    '''

    # If the product has fewer rows or columns than the shared inner
    # dimension, forming it densely is cheap, so do it eagerly.
    if matrix1.shape[0] < matrix1.shape[1] or matrix2.shape[1] < matrix2.shape[0]:
        return np.dot(matrix1, matrix2)

    # Otherwise return a lazy operator that applies the factors in sequence.
    # (Plausible reconstruction of the truncated remainder of this listing.)
    shape = (matrix1.shape[0], matrix2.shape[1])

    def matvec(v):
        return np.dot(matrix1, np.dot(matrix2, v))

    def rmatvec(v):
        return np.dot(np.conj(matrix2.T), np.dot(np.conj(matrix1.T), v))

    return LinearOperator(shape, matvec=matvec, rmatvec=rmatvec)
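
A hedged usage sketch, assuming the reconstructed lazy branch above: when the factors are low-rank, svds can work with the operator without ever forming the full product.

import numpy as np
from scipy.sparse.linalg import svds

m1 = np.random.randn(1000, 10)   # tall and skinny
m2 = np.random.randn(10, 1000)   # short and wide

op = matrixProductLinearOperator(m1, m2)  # lazy; the 1000 x 1000 product is never formed
u, s, vt = svds(op, k=5)                  # leading singular triplets of m1 @ m2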
Example 4
from TNR.Tensor.tensor import Tensor
from TNR.Tensor.arrayTensor import ArrayTensor
from TNR.Network.treeNetwork import TreeNetwork
from TNR.Network.node import Node
from TNR.Network.link import Link
from TNR.Network.bucket import Bucket
from TNR.Network.traceMin import traceMin
from TNR.Utilities.svd import entropy
from TNR.Utilities.graphPlotter import makePlotter

counter0 = 0

from TNR.Utilities.logger import makeLogger
from TNR import config
logger = makeLogger(__name__, config.levels['treeTensor'])


class TreeTensor(Tensor):
    def __init__(self, accuracy):
        self.accuracy = accuracy
        self.network = TreeNetwork(accuracy=accuracy)
        self.externalBuckets = []
        self.optimized = set()

    def addTensor(self, tensor):
        n = Node(tensor, Buckets=[Bucket() for _ in range(tensor.rank)])
        self.network.addNode(n)
        self.externalBuckets.extend(n.buckets)
        # Enforce the tree network's rank <= 3 invariant on insertion.
        if tensor.rank > 3:
            self.network.splitNode(n)
        # The listing is truncated here; presumably the new node is returned.
        return n
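
A minimal usage sketch, assuming ArrayTensor simply wraps a numpy array (an assumption based only on the imports above):

import numpy as np

t = TreeTensor(accuracy=1e-8)
t.addTensor(ArrayTensor(np.random.randn(2, 2, 2, 2)))
# The rank-4 tensor is split on insertion, so every node in t.network
# holds a tensor of rank at most 3 while the four external buckets
# still expose the original indices.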
Example 5
from collections import defaultdict
import networkx
import numpy as np
import operator
from collections import Counter
from scipy.sparse.csgraph import shortest_path
from scipy.sparse import csr_matrix

from TNR.Utilities.logger import makeLogger
from TNR import config
logger = makeLogger(__name__, config.levels['traceMin'])


def hortonGraph(adj, s):
    '''
    adj - Adjacency matrix for the original graph
    s - Set of edges which will cross over between the two copies of the original graph

    Edges are stored as tuples of indices.
    '''
    adjH = np.zeros((2 * len(adj), 2 * len(adj)))
    adjH[:len(adj), :len(adj)] = adj
    adjH[len(adj):, len(adj):] = adj

    for e in s:
        i, j = e
        ip = i + len(adj)
        jp = j + len(adj)

        # Route the marked edges between the two copies...
        adjH[ip, j] = adj[i, j]
        adjH[j, ip] = adj[j, i]
        adjH[i, jp] = adj[i, j]
        adjH[jp, i] = adj[j, i]

        # ...and remove them from within each copy, so that any path from a
        # node to its copy uses the edges of s an odd number of times.
        # (Plausible reconstruction of the truncated remainder.)
        adjH[i, j] = 0
        adjH[j, i] = 0
        adjH[ip, jp] = 0
        adjH[jp, ip] = 0

    return adjH
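
A short sketch of how the doubled graph is used in Horton's construction, assuming the reconstructed ending above: a shortest path from a node to its copy crosses s an odd number of times, so its length is the weight of the lightest cycle overlapping s oddly.

import numpy as np
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import shortest_path

# A weighted triangle; find the lightest cycle using edge (0, 1) oddly.
adj = np.array([[0., 1., 2.],
                [1., 0., 1.],
                [2., 1., 0.]])
adjH = hortonGraph(adj, [(0, 1)])

dist = shortest_path(csr_matrix(adjH), directed=False)
print(dist[0, 0 + len(adj)])  # 4.0: the cycle 0-1-2-0 of weight 1+1+2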