Example #1
    def outer_product(self, physin_label="physin", physout_label="physout"):
        """
        Take the outer product of this PEPS with itself, returning a PEPO. 
        The outer product of each  tensor in the PEPS is taken and 
        virtual indices are consolidated. Returns an instance of SquareLatticePEPO."""
        tensor_array = []
        for row in range(self.shape[0]):
            new_row = []
            for col in range(self.shape[1]):
                #This takes the outer product of two tensors
                #Without contracting any indices
                outer = tn.contract(self[row, col], self[row, col], [], [])
                #Replace the first physical label with physin label
                outer.labels[outer.labels.index(
                    self.phys_label)] = physin_label
                #Replace the second physical label with physout label
                outer.labels[outer.labels.index(
                    self.phys_label)] = physout_label

                #Consolidate indices
                outer.consolidate_indices(labels=[
                    self.left_label, self.right_label, self.up_label,
                    self.down_label
                ])

                new_row.append(outer)
            tensor_array.append(new_row)

        return SquareLatticePEPO(tensor_array,
                                 up_label=self.up_label,
                                 down_label=self.down_label,
                                 right_label=self.right_label,
                                 left_label=self.left_label,
                                 physin_label=physin_label,
                                 physout_label=physout_label)
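The core step above is an outer product taken as a contraction over empty index lists. Below is a minimal standalone sketch of just that step, using tncontract's Tensor and contract directly; the tensor shape and the "phys"/"left"/"right" labels are illustrative assumptions, and the PEPS/PEPO wrappers and the final index consolidation are omitted.

import numpy as np
import tncontract as tn

# A single site tensor with one physical and two virtual indices
A = tn.Tensor(np.random.rand(2, 3, 3), labels=["phys", "left", "right"])

# Contracting over empty index lists sums nothing, i.e. it returns the outer product
outer = tn.contract(A, A, [], [])

# The result carries two copies of every label; relabel the two "phys" copies
# the same way outer_product does (first match -> physin, second -> physout)
outer.labels[outer.labels.index("phys")] = "physin"
outer.labels[outer.labels.index("phys")] = "physout"
print(outer.labels)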
Example #2
def extract_core_tensor_via_common_features(data_tensor, left_features, right_features):
    """
    Perform dimensionality reduction of the pre-tensorized data_tensor by contraction with pre-exisiting left
    and right features. This is normally used to extract features from test or out-of-sample data given a mixed
    canonical mps decomposition of the training data (where the left and right features are tensor obtained
    from the left and right canonical parts of the MPS respectively)

    See "Optimal Feature Extraction and Classification of Tensors via Matrix Product State Decomposition"
    (https://arxiv.org/abs/1503.00516v2) for details of the algorithm.

    Note: In this implementation the tensorization of the data_tensor _must_ match the tensorization of the
    pre-existing MPS. It is preferable rather use "extract_core_tensor_via_common_features_from_matrix" which
    automatically takes care of the tensorization of the data from which the features should be extracted (i.e
    ensures matching of the partition with the pre-existing MPS)

    :param data_tensor: The tensorized data that we want to be reduced via MCA. The partition must match the MCS.
    :param left_features: The list of left tensors from the previously obtained mixed canonical state
    :param right_features: The list of right tensors from the previously obtained mixed canonical state
    :return data-reduced: The extracted reduced data set
    """
    
    left_consolidated = reconstruct_to_tensor(left_features)
    left_consolidated.replace_label("b", "l")
    left_consolidated.move_index("l", 0)

    right_consolidated = reconstruct_to_tensor(right_features)
    right_consolidated.replace_label("a", "r")
    
    batch_size_position = np.size(left_features)
    partition_size = np.size(data_tensor.labels)

    label_list = ["batchsize"]
    label_list.extend([str(j + 1) for j in range(partition_size - 1)])

    right_labeling = label_list[batch_size_position + 1:partition_size]
    left_labeling = label_list[1:batch_size_position + 1]
    right_consolidated.replace_label([right_consolidated.labels[j] for j in range(1, np.size(right_consolidated.labels))], right_labeling)
    left_consolidated.replace_label([left_consolidated.labels[j] for j in range(1, np.size(left_consolidated.labels))], left_labeling)

    extracted_core_int = tn.contract(left_consolidated, data_tensor, left_labeling, left_labeling)
    extracted_core = tn.contract(right_consolidated, extracted_core_int, right_labeling, right_labeling)
    extracted_core.move_index("l", 0)
    extracted_core.replace_label("batchsize", "c")

    data_reduced = tn.tensor_to_matrix(extracted_core, extracted_core.labels[2])

    return data_reduced
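The label bookkeeping in the function above splits the data tensor's indices into a batch index followed by a left group and a right group. A small plain-Python sketch of just that split, with an assumed partition of four data indices and two left feature cores:

# Assumed sizes for illustration
batch_size_position = 2   # number of left feature cores
partition_size = 5        # batch index plus four data indices

label_list = ["batchsize"] + [str(j + 1) for j in range(partition_size - 1)]
left_labeling = label_list[1:batch_size_position + 1]                 # ['1', '2']
right_labeling = label_list[batch_size_position + 1:partition_size]   # ['3', '4']
print(left_labeling, right_labeling)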
Example #3
def contract_func2(tensor0, tensor_list, edge_list, n):
    temp, temp_edge_list = contract_local(tensor_list, n)
    tensor_result = tn.contract(temp, tensor0, temp_edge_list, edge_list)
    tensor_result.data = tensor_result.data.transpose(0, 2, 1)
    tensor_result.labels[1], tensor_result.labels[2] = tensor_result.labels[
        2], tensor_result.labels[1]
    for i in range(tensor_result.shape[2]):
        tensor_result.data[:, :, i] = preprocessing.normalize(
            tensor_result.data[:, :, i], axis=0, norm='l2')
    return tensor_result
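The per-slice normalization used here (and in several of the following examples) is scikit-learn's column-wise L2 normalization applied to every slice along the last axis. A self-contained sketch of that step on a plain numpy array with assumed dimensions:

import numpy as np
from sklearn import preprocessing

data = np.random.rand(4, 3, 5)   # e.g. (bond, label, batch); shapes are assumptions
for i in range(data.shape[2]):
    # rescale each column of the i-th slice to unit L2 norm
    data[:, :, i] = preprocessing.normalize(data[:, :, i], axis=0, norm='l2')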
Example #4
    def contract_special(self, tensor0, tensor1, lab1, tensor2, lab2, tensor3, lab3, Num):
        temp = self.contract_local3(tensor1, tensor2, tensor3, Num)
        tensor_result = tn.contract(
            tensor0, temp, [lab1, lab2, lab3], ["a", "b", "c"])
        tensor_result.data = tensor_result.data.transpose(1, 0, 2)
        tensor_result.labels[0], tensor_result.labels[1] = tensor_result.labels[1], tensor_result.labels[0]

        for i in range(tensor_result.shape[1]):  # normalization
            tensor_result.data[:, i, :] = preprocessing.normalize(
                tensor_result.data[:, i, :], axis=0, norm='l2')
        return tensor_result
Example #5
    def contract_unit(self, tensor0, tensor1, tensor2, tensor3, tensor4, Num):
        temp = self.contract_local(tensor1, tensor2, tensor3, tensor4, Num)
        tensor_result = tn.contract(
            tensor0, temp, ["1", "2", "3", "4"], ["a", "b", "c", "d"])

        if len(tensor_result.shape) == 2:
            tensor_result.data = preprocessing.normalize(
                tensor_result.data, axis=0, norm='l2')  # normalization
        else:
            for i in range(tensor_result.shape[1]):     # normalization
                tensor_result.data[:, i, :] = preprocessing.normalize(
                    tensor_result.data[:, i, :], axis=0, norm='l2')
        return tensor_result
Example #6
def contract_func0(tensor0, tensor1, tensor2, tensor3, tensor4, n):
    temp = contract_func1(tensor1, tensor2, tensor3, tensor4, n)
    tensor_result = tn.contract(temp, tensor0, ["D00", "D01", "D10", "D11"],
                                ["D00", "D01", "D10", "D11"])

    if len(tensor_result.shape) == 2:
        tensor_result.data = preprocessing.normalize(tensor_result.data,
                                                     axis=0,
                                                     norm='l2')
    else:
        for i in range(tensor_result.shape[2]):
            tensor_result.data[:, :, i] = preprocessing.normalize(
                tensor_result.data[:, :, i], axis=0, norm='l2')
    return tensor_result
Example #7
    def test(self, test_tensor, label_test_tensor):
        Num = test_tensor[0][0].shape[1]
        for j, k in product(range(8), range(8)):
            self.contracted[1][j][k] = self.contract_unit(self.tn_layers[1][j][k],
                                                          test_tensor[2 *
                                                                      j][2 * k],
                                                          test_tensor[2 *
                                                                      j][2 * k + 1],
                                                          test_tensor[2 *
                                                                      j + 1][2 * k],
                                                          test_tensor[2 * j + 1][2 * k + 1], Num)
        for i in range(2, 5):
            for j in (range(self.layer_units[i])):
                for k in (range(self.layer_units[i])):
                    self.contracted[i][j][k] = self.contract_unit(self.tn_layers[i][j][k],
                                                                  self.contracted[i -
                                                                                  1][2 * j][2 * k],
                                                                  self.contracted[i - 1][2 * j][
                        2 * k + 1],
                        self.contracted[i - 1][2 * j + 1][
                        2 * k],
                        self.contracted[i - 1][2 * j + 1][
                        2 * k + 1], Num)

        # option 1
        temp = tn.contract(
            self.contracted[4][0][0], label_test_tensor, "up", "down")
        temp.trace("up", "down")
        acc1 = temp.data / Num

        # option 2
        count = 0
        for i in range(Num):
            # There is currently an issue here because of negative values
            x = np.argmax(abs(self.contracted[4][0][0].data[:, i]))
            print(np.array(self.contracted[4][0][0].data).shape)
            print(np.array(self.contracted[4][0][0].data[:, i]))
            for j in range(2):
                if j == x:
                    self.contracted[4][0][0].data[j, i] = 1
                else:
                    self.contracted[4][0][0].data[j, i] = 0
            print(self.contracted[4][0][0].data[:, i] == label_test_tensor.data[i, :])
            if (self.contracted[4][0][0].data[:, i] == label_test_tensor.data[i, :]).all():
                count = count + 1

        acc2 = count / Num

        return acc1, acc2
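Option 2 above computes an argmax accuracy from a (classes, samples) output array against one-hot labels stored as (samples, classes). A numpy-only sketch of the same comparison, with assumed shapes and without overwriting the predictions in place:

import numpy as np

outputs = np.random.randn(2, 10)                  # (classes, samples)
labels = np.eye(2)[np.random.randint(0, 2, 10)]   # (samples, classes), one-hot

pred = np.argmax(np.abs(outputs), axis=0)   # predicted class per sample
true = np.argmax(labels, axis=1)
acc2 = np.mean(pred == true)
print(acc2)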
Example #8
def contract_func3(tensor0, tensor_list, n):
    channel = len(tensor_list)
    elist = ['C' + str(i) for i in range(channel)]
    temp = contract_func4(tensor_list, n)

    tensor_result = tn.contract(temp, tensor0, elist, elist)

    if len(tensor_result.shape) == 2:
        tensor_result.data = preprocessing.normalize(tensor_result.data,
                                                     axis=0,
                                                     norm='l2')
    else:
        for i in range(tensor_result.shape[2]):
            tensor_result.data[:, :, i] = preprocessing.normalize(
                tensor_result.data[:, :, i], axis=0, norm='l2')
    return tensor_result
Example #9
def outer_product_peps(peps1,
                       peps2,
                       physin_label="physin",
                       physout_label="physout"):
    """Return the outer product of two PEPS networks i.e. if `peps1` and
    `peps2` correspond to two PEPS |a> and |b>  then outer_product_peps(peps1,
    peps2) returns the density operator corresponding to |a><b|, where "physin"
    is the physical index associated with <b|" and "physout" is associated with
    |a>.  Assumes that input PEPS are the same size. The output physin label
    replaces the phys label of `peps2` and the output label physout replaces
    the phys label  of `peps1`."""
    #TODO input PEPS must have the same left right up down labels. Check this
    #TODO careful for conflicting phys labels of peps1 and peps2
    if peps1.shape != peps2.shape:
        raise ValueError("Input PEPS do not have the same dimensions.")
    tensor_array = []
    for row in range(peps1.shape[0]):
        new_row = []
        for col in range(peps1.shape[1]):
            #This takes the outer product of two tensors
            #Without contracting any indices
            outer = tn.contract(peps1[row, col],
                                tn.tensor.conjugate(peps2[row, col]), [], [])
            #Replace the physical label of peps1 with physout label
            outer.labels[outer.labels.index(peps1.phys_label)] = physout_label
            #Replace the physical label of peps2 with physin label
            outer.labels[outer.labels.index(peps2.phys_label)] = physin_label

            #Consolidate indices
            outer.consolidate_indices(labels=[
                peps1.left_label, peps1.right_label, peps1.up_label,
                peps1.down_label
            ])

            new_row.append(outer)
        tensor_array.append(new_row)

    return SquareLatticePEPO(tensor_array,
                             up_label=peps1.up_label,
                             down_label=peps1.down_label,
                             right_label=peps1.right_label,
                             left_label=peps1.left_label,
                             physin_label=physin_label,
                             physout_label=physout_label)
Example #10
def reconstruct_to_tensor(mps):
    """
    Reconstructs a multi-dimensional tensor from an MPS (matrix product state).

    :param mps: A matrix product state.
    :return tensor_form: the high dimensional tensor corresponding to the MPS.
    """

    num_cores = np.size(mps)

    tensor_form = mps[0].copy()
    tensor_form.replace_label(["c"], ["0"])

    for j in range(1, num_cores):
        tensor_form = tn.contract(tensor_form, mps[j], "b", "a")
        tensor_form.replace_label(["c"], [str(j)])

    tensor_form.remove_all_dummy_indices(labels=None)

    return tensor_form
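A minimal usage sketch of reconstruct_to_tensor, assuming the label convention the function relies on ("a" = left bond, "b" = right bond, "c" = physical) and dimension-1 boundary bonds so that the leftover bond indices are removed as dummy indices:

import numpy as np
import tncontract as tn

# A 3-site MPS with trivial (dimension 1) boundary bonds
mps = [
    tn.Tensor(np.random.rand(1, 3, 2), labels=["a", "b", "c"]),
    tn.Tensor(np.random.rand(3, 3, 2), labels=["a", "b", "c"]),
    tn.Tensor(np.random.rand(3, 1, 2), labels=["a", "b", "c"]),
]

full = reconstruct_to_tensor(mps)
print(full.labels)   # one index per site, relabelled "0", "1", "2"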
Example #11
    def update_single_w_channel_tensor(self, y, n, cl, ci, cj, cc):
        path = self.get_path_1(cl, ci, cj, cc)
        for l in range(self.num_layers):
            if cl == l:
                for i, j, c in product(range(self.layer_size[l]), range(self.layer_size[l]), range(self.channel)):
                    if (not self.flag_contract[l][i][j][c]) and (i != ci or j != cj or c != cc):
                        self.contracted[l+1][i][j][c] = contract_func0(self.ttn_tensor_list[l][i][j][c],
                                                                       self.contracted[l][2*i][2*j][c],
                                                                       self.contracted[l][2*i][2*j+1][c],
                                                                       self.contracted[l][2*i+1][2*j][c],
                                                                       self.contracted[l][2*i+1][2*j+1][c], n)
                        self.flag_contract[l][i][j][c] = True
                        if l == self.layer_comb_channel-1:
                            self.flag_contract[l+1][i][j] = False
                        else:
                            self.flag_contract[l+1][i//2][j//2][c] = False
                self.contracted[cl+1][ci][cj][cc] = contract_func1(self.contracted[cl][2*ci][2*cj][cc],
                                                                   self.contracted[cl][2*ci][2*cj+1][cc],
                                                                   self.contracted[cl][2*ci+1][2*cj][cc],
                                                                   self.contracted[cl][2*ci+1][2*cj+1][cc],n)
                self.flag_contract[cl][ci][cj][cc] = False
                if cl == self.layer_comb_channel-1:
                    self.flag_contract[cl+1][ci][cj] = False
                else:
                    self.flag_contract[cl+1][ci//2][cj//2][cc] = False
            elif l < self.layer_comb_channel:
                for i, j, c in product(range(self.layer_size[l]), range(self.layer_size[l]), range(self.channel)):
                    if not self.flag_contract[l][i][j][c]:
                        if([l,i,j,c] in path) and (l-1==cl):
                            if ci%2==0 and cj%2==0:
                                contracted_list = [self.contracted[l][ci][cj+1][c],self.contracted[l][ci+1][cj][c],self.contracted[l][ci+1][cj+1][c]]
                                edge_list = ['D01', 'D10', 'D11']
                            elif ci%2==0 and cj%2==1:
                                contracted_list = [self.contracted[l][ci][cj-1][c],self.contracted[l][ci+1][cj-1][c],self.contracted[l][ci+1][cj][c]]
                                edge_list = ['D00', 'D10', 'D11']
                            elif ci%2==1 and cj%2==0:
                                contracted_list = [self.contracted[l][ci-1][cj][c],self.contracted[l][ci-1][cj+1][c],self.contracted[l][ci][cj+1][c]]
                                edge_list = ['D00', 'D01', 'D11']
                            else:
                                contracted_list = [self.contracted[l][ci-1][cj-1][c],self.contracted[l][ci-1][cj][c],self.contracted[l][ci][cj-1][c]]
                                edge_list = ['D00', 'D01', 'D10']
                            self.contracted[l+1][i][j][c] = contract_func2(self.ttn_tensor_list[l][i][j][c], contracted_list, edge_list, n)
                            self.flag_contract[l][i][j][c] = False
                            if l == self.layer_comb_channel-1:
                                self.flag_contract[l+1][i][j] = False
                            else:
                                self.flag_contract[l+1][i//2][j//2][c] = False
                        else:
                            self.contracted[l+1][i][j][c] = contract_func0(self.ttn_tensor_list[l][i][j][c],
                                                                           self.contracted[l][2*i][2*j][c],
                                                                           self.contracted[l][2*i][2*j+1][c],
                                                                           self.contracted[l][2*i+1][2*j][c],
                                                                           self.contracted[l][2*i+1][2*j+1][c], n)
                            self.flag_contract[l][i][j][c] = (not ([l,i,j,c] in path))
                            if l == self.layer_comb_channel-1:
                                self.flag_contract[l+1][i][j] = False
                            else:
                                self.flag_contract[l+1][i//2][j//2][c] = False
            elif l == self.layer_comb_channel:
                for i, j in product(range(self.layer_size[l]), range(self.layer_size[l])):
                    if not self.flag_contract[l][i][j]:
                        if([l,i,j] in path) and (l-1==cl):
                            contracted_list = copy.deepcopy(self.contracted[l][i][j])
                            edge_list = ['C'+str(i) for i in range(self.channel)]
                            contracted_list.pop(cc)
                            edge_list.pop(cc)
                            self.contracted[l+1][i][j] = contract_func2(self.ttn_tensor_list[l][i][j], contracted_list, edge_list, n)
                            self.flag_contract[l][i][j] = False
                            if l < self.num_layers-1:
                                self.flag_contract[l+1][i//2][j//2] = False
                        else:
                            self.contracted[l+1][i][j] = contract_func3(self.ttn_tensor_list[l][i][j],
                                                                        self.contracted[l][i][j], n)
                            self.flag_contract[l][i][j] = (not ([l,i,j] in path))
                            if l < self.num_layers-1:
                                self.flag_contract[l+1][i//2][j//2] = False
            elif l > self.layer_comb_channel:
                for i, j in product(range(self.layer_size[l]), range(self.layer_size[l])):
                    if not self.flag_contract[l][i][j]:
                        self.contracted[l+1][i][j] = contract_func0(self.ttn_tensor_list[l][i][j],
                                                                    self.contracted[l][2*i][2*j],
                                                                    self.contracted[l][2*i][2*j+1],
                                                                    self.contracted[l][2*i+1][2*j],
                                                                    self.contracted[l][2*i+1][2*j+1], n)
                        self.flag_contract[l][i][j] = (not ([l,i,j] in path))
                        if l < self.num_layers-1:
                            self.flag_contract[l+1][i//2][j//2] = False

        tempD = tn.zeros_tensor([n, self.bond_inner], labels=['S', 'B'])
        for s, b in product(range(n), range(self.bond_inner)):
            sum1 = 0
            for f in range(self.bond_label):
                sum1 = sum1 + self.contracted[self.num_layers][0][0].data[s,f,b] * y.data[s,f]
            tempD.data[s, b] = sum1

        tensor_environment = tn.contract(self.contracted[cl+1][ci][cj][cc], tempD, ["S"], ["S"])

        bond_in = self.bond_data if cl == 0 else self.bond_inner
        matrix = np.reshape(tensor_environment.data,
                (bond_in*bond_in*bond_in*bond_in, self.bond_inner))
        u, sigma, vt = np.linalg.svd(matrix, 0)
        self.ttn_tensor_list[cl][ci][cj][cc].data = np.reshape(
                np.dot(u, vt), (bond_in,bond_in,bond_in,bond_in,self.bond_inner))
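The final step above reshapes the environment tensor into a matrix, takes its SVD, and replaces the node with the isometry U·Vt reshaped back to tensor form. A numpy-only sketch of that update with assumed bond dimensions (np.linalg.svd(matrix, 0) is the same as full_matrices=False):

import numpy as np

bond_in, bond_out = 3, 4
env = np.random.rand(bond_in, bond_in, bond_in, bond_in, bond_out)

matrix = env.reshape(bond_in ** 4, bond_out)
u, sigma, vt = np.linalg.svd(matrix, full_matrices=False)
updated = (u @ vt).reshape(bond_in, bond_in, bond_in, bond_in, bond_out)

# u @ vt is an isometry: contracting the four input legs of the updated
# tensor with themselves gives the identity on the output leg
check = np.tensordot(updated, updated, axes=([0, 1, 2, 3], [0, 1, 2, 3]))
print(np.allclose(check, np.eye(bond_out)))   # True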
Example #12
    def update_singletensor(self, c_i, c_j, c_k):

        path_len = 5 - c_i
        path = [[c_i, c_j, c_k]]
        tem_c_j = c_j
        tem_c_k = c_k
        for i in range(1, path_len):
            tem_c_j = tem_c_j // 2
            tem_c_k = tem_c_k // 2
            path.append([c_i + i, tem_c_j, tem_c_k])

        for i in range(1, 5):
            if i == c_i:
                for j, k in product(range(self.layer_units[i]), range(self.layer_units[i])):
                    if (self.flag_contract[i, j, k] == 0) and ((j != c_j) or (k != c_k)):
                        self.contracted[i][j][k] = self.contract_unit(self.tn_layers[i][j][k], self.contracted[i - 1][2 * j][2 * k], self.contracted[(
                            i - 1)][2 * j][2 * k + 1], self.contracted[i - 1][2 * j + 1][2 * k], self.contracted[i - 1][2 * j + 1][2 * k + 1], self.n_train)
                        self.flag_contract[i, j, k] = 1
                        if i < 4:
                            self.flag_contract[i + 1, j // 2, k // 2] = 0
                self.contracted[c_i][c_j][c_k] = self.contract_local(self.contracted[c_i - 1][2 * c_j][2 * c_k], self.contracted[(
                    c_i - 1)][2 * c_j][2 * c_k + 1], self.contracted[c_i - 1][2 * c_j + 1][2 * c_k], self.contracted[c_i - 1][2 * c_j + 1][2 * c_k + 1], self.n_train)
                self.flag_contract[c_i, c_j, c_k] = 0
                if i < 4:
                    self.flag_contract[c_i + 1, c_j // 2, c_k // 2] = 0
            else:
                for j, k in product(range(self.layer_units[i]), range(self.layer_units[i])):
                    if self.flag_contract[i, j, k] == 0:
                        if ([i, j, k] in path) and ((i - 1) == c_i):
                            if (c_j % 2 == 0) and (c_k % 2 == 0):
                                [lab1, lab2, lab3] = ["2", "3", "4"]
                                tensor1 = self.contracted[c_i][c_j][c_k + 1]
                                tensor2 = self.contracted[c_i][c_j + 1][c_k]
                                tensor3 = self.contracted[c_i][c_j + 1][c_k + 1]

                            if (c_j % 2 == 0) and (c_k % 2 == 1):
                                [lab1, lab2, lab3] = ["1", "3", "4"]
                                tensor1 = self.contracted[c_i][c_j][c_k - 1]
                                tensor2 = self.contracted[c_i][c_j + 1][c_k - 1]
                                tensor3 = self.contracted[c_i][c_j + 1][c_k]

                            if (c_j % 2 == 1) and (c_k % 2 == 0):
                                [lab1, lab2, lab3] = ["1", "2", "4"]
                                tensor1 = self.contracted[c_i][c_j - 1][c_k]
                                tensor2 = self.contracted[c_i][c_j - 1][c_k + 1]
                                tensor3 = self.contracted[c_i][c_j][c_k + 1]

                            if (c_j % 2 == 1) and (c_k % 2 == 1):
                                [lab1, lab2, lab3] = ["1", "2", "3"]
                                tensor1 = self.contracted[c_i][c_j - 1][c_k - 1]
                                tensor2 = self.contracted[c_i][c_j - 1][c_k]
                                tensor3 = self.contracted[c_i][c_j][c_k - 1]

                            self.contracted[i][j][k] = self.contract_special(
                                self.tn_layers[i][j][k], tensor1, lab1, tensor2, lab2, tensor3, lab3, self.n_train)
                            self.flag_contract[i, j, k] = 0
                            if i < 4:
                                self.flag_contract[i + 1, j // 2, k // 2] = 0

                        else:
                            # print(i,j,k)
                            self.contracted[i][j][k] = self.contract_unit(self.tn_layers[i][j][k], self.contracted[i - 1][2 * j][2 * k], self.contracted[
                                i - 1][2 * j][2 * k + 1], self.contracted[i - 1][2 * j + 1][2 * k], self.contracted[i - 1][2 * j + 1][2 * k + 1], self.n_train)
                            if ([i, j, k] in path):
                                self.flag_contract[i, j, k] = 0
                            else:
                                self.flag_contract[i, j, k] = 1
                            if i < 4:
                                self.flag_contract[i + 1, j // 2, k // 2] = 0
        if c_i != 4:

            bond = self.contracted[c_i][c_j][c_k].shape[0]
            tensor_environment = tn.random_tensor(
                bond, bond, bond, bond, self.bond_inner, labels=['e1', 'e2', 'e3', 'e4', 'eup'])
            for i, j, k, l, m in product(range(bond), range(bond), range(bond), range(bond), range(self.bond_inner)):
                sum1 = sum(self.contracted[c_i][c_j][c_k].data[i, j, k, l, g] * self.contracted[4][0][0].data[f, m, g] * self.labels.data[g, f]
                           for f in range(self.bond_label) for g in range(self.n_train))
                tensor_environment.data[i, j, k, l, m] = sum1

        else:
            tensor_environment = tn.contract(
                self.contracted[4][0][0], self.labels, "down", "up")

        if c_i == 1:
            matrix = np.reshape(tensor_environment.data, (self.bond_data *
                                                          self.bond_data * self.bond_data * self.bond_data, self.bond_inner))
            u, sigma, vt = la.svd(matrix, 0)
            self.tn_layers[c_i][c_j][c_k].data = np.reshape(
                np.dot(u, vt), (self.bond_data, self.bond_data, self.bond_data, self.bond_data, self.bond_inner))
        else:
            if c_i == 4:
                matrix = np.reshape(tensor_environment.data, (self.bond_inner *
                                                              self.bond_inner * self.bond_inner * self.bond_inner, self.bond_label))
                u, sigma, vt = la.svd(matrix, 0)
                self.tn_layers[c_i][c_j][c_k].data = np.reshape(
                    np.dot(u, vt), (self.bond_inner, self.bond_inner, self.bond_inner, self.bond_inner, self.bond_label))
            else:
                matrix = np.reshape(tensor_environment.data, (self.bond_inner *
                                                              self.bond_inner * self.bond_inner * self.bond_inner, self.bond_inner))
                u, sigma, vt = la.svd(matrix, 0)
                self.tn_layers[c_i][c_j][c_k].data = np.reshape(
                    np.dot(u, vt), (self.bond_inner, self.bond_inner, self.bond_inner, self.bond_inner, self.bond_inner))

        # compute the training accuracy-------------------------------------------
        j = c_j
        k = c_k
        for i in range(c_i, 5):
            self.contracted[i][j][k] = self.contract_unit(self.tn_layers[i][j][k],
                                                          self.contracted[i -
                                                                          1][2 * j][2 * k],
                                                          self.contracted[i -
                                                                          1][2 * j][2 * k + 1],
                                                          self.contracted[i -
                                                                          1][2 * j + 1][2 * k],
                                                          self.contracted[i - 1][2 * j + 1][2 * k + 1], self.n_train)
            j = j // 2
            k = k // 2

        temp = tn.contract(self.contracted[4][0][0], self.labels, "up", "down")
        temp.trace("up", "down")
        acc = temp.data / self.n_train
        return acc
Example #13
    def update_singletensor(self, c_i, c_j, c_k):
        # c_i is the layer index, c_j is the tensor position
        path_len = 5 - c_i
        path = [[c_i, c_j, c_k]]
        tem_c_j = c_j
        tem_c_k = c_k
        for i in range(1, path_len):
            # // is integer division, returning an int
            tem_c_j = tem_c_j // 2
            tem_c_k = tem_c_k // 2
            path.append([c_i + i, tem_c_j, tem_c_k])
        # Update contracted, i.e. the tensors in each layer
        for i in range(1, 5):
            if i == c_i:
                # Iterate over the rows and columns and update
                for j, k in product(range(self.layer_units[i]), range(self.layer_units[i])):
                    if (self.flag_contract[i, j, k] == 0) and ((j != c_j) or (k != c_k)):
                        x =  self.contracted[i - 1][2 * j][2 * k]
                        y = self.contracted[i - 1][2 * j][2 * k]
                        self.contracted[i][j][k] = self.contract_unit(self.tn_layers[i][j][k], self.contracted[i - 1][2 * j][2 * k], self.contracted[(
                            i - 1)][2 * j][2 * k + 1], self.contracted[i - 1][2 * j + 1][2 * k], self.contracted[i - 1][2 * j + 1][2 * k + 1], self.n_train)
                        self.flag_contract[i, j, k] = 1
                        if i < 4:
                            self.flag_contract[i + 1, j // 2, k // 2] = 0
                self.contracted[c_i][c_j][c_k] = self.contract_local(self.contracted[c_i - 1][2 * c_j][2 * c_k], self.contracted[(
                    c_i - 1)][2 * c_j][2 * c_k + 1], self.contracted[c_i - 1][2 * c_j + 1][2 * c_k], self.contracted[c_i - 1][2 * c_j + 1][2 * c_k + 1], self.n_train)
                self.flag_contract[c_i, c_j, c_k] = 0
                if i < 4:
                    self.flag_contract[c_i + 1, c_j // 2, c_k // 2] = 0
            else:
                for j, k in product(range(self.layer_units[i]), range(self.layer_units[i])):
                    if self.flag_contract[i, j, k] == 0:
                        if ([i, j, k] in path) and ((i - 1) == c_i):
                            if (c_j % 2 == 0) and (c_k % 2 == 0):
                                [lab1, lab2, lab3] = ["2", "3", "4"]
                                tensor1 = self.contracted[c_i][c_j][c_k + 1]
                                tensor2 = self.contracted[c_i][c_j + 1][c_k]
                                tensor3 = self.contracted[c_i][c_j + 1][c_k + 1]

                            if (c_j % 2 == 0) and (c_k % 2 == 1):
                                [lab1, lab2, lab3] = ["1", "3", "4"]
                                tensor1 = self.contracted[c_i][c_j][c_k - 1]
                                tensor2 = self.contracted[c_i][c_j + 1][c_k - 1]
                                tensor3 = self.contracted[c_i][c_j + 1][c_k]

                            if (c_j % 2 == 1) and (c_k % 2 == 0):
                                [lab1, lab2, lab3] = ["1", "2", "4"]
                                tensor1 = self.contracted[c_i][c_j - 1][c_k]
                                tensor2 = self.contracted[c_i][c_j - 1][c_k + 1]
                                tensor3 = self.contracted[c_i][c_j][c_k + 1]

                            if (c_j % 2 == 1) and (c_k % 2 == 1):
                                [lab1, lab2, lab3] = ["1", "2", "3"]
                                tensor1 = self.contracted[c_i][c_j - 1][c_k - 1]
                                tensor2 = self.contracted[c_i][c_j - 1][c_k]
                                tensor3 = self.contracted[c_i][c_j][c_k - 1]

                            self.contracted[i][j][k] = self.contract_special(
                                self.tn_layers[i][j][k], tensor1, lab1, tensor2, lab2, tensor3, lab3, self.n_train)
                            self.flag_contract[i, j, k] = 0
                            if i < 4:
                                self.flag_contract[i + 1, j // 2, k // 2] = 0

                        else:
                            # print(i,j,k)
                            self.contracted[i][j][k] = self.contract_unit(self.tn_layers[i][j][k], self.contracted[i - 1][2 * j][2 * k], self.contracted[
                                i - 1][2 * j][2 * k + 1], self.contracted[i - 1][2 * j + 1][2 * k], self.contracted[i - 1][2 * j + 1][2 * k + 1], self.n_train)
                            if ([i, j, k] in path):
                                self.flag_contract[i, j, k] = 0
                            else:
                                self.flag_contract[i, j, k] = 1
                            if i < 4:
                                self.flag_contract[i + 1, j // 2, k // 2] = 0
        # Compute the environment tensor E
        if c_i != 4:

            bond = self.contracted[c_i][c_j][c_k].shape[0]             
                
            tempD = tn.zeros_tensor([self.bond_inner, self.n_train], labels=['m', 'g'])
            for m, g in product(range(self.bond_inner), range(self.n_train)):
                sum1 = 0
                for f in range(self.bond_label):
                    sum1 = sum1 + self.contracted[4][0][0].data[f, m, g] * self.labels.data[g, f]
                tempD.data[m, g] = sum1

            tensor_environment = tn.contract(self.contracted[c_i][c_j][c_k], tempD, ["down"], ["g"])

        else:
            tensor_environment = tn.contract(
                self.contracted[4][0][0], self.labels, "down", "up")
        # Update tn_layers according to the layer
        if c_i == 1:
            matrix = np.reshape(tensor_environment.data, (self.bond_data *
                                                          self.bond_data * self.bond_data * self.bond_data, self.bond_inner))
            u, sigma, vt = la.svd(matrix, 0)
            # Update the T tensor
            self.tn_layers[c_i][c_j][c_k].data = np.reshape(
                np.dot(u, vt), (self.bond_data, self.bond_data, self.bond_data, self.bond_data, self.bond_inner))
        else:
            if c_i == 4:
                matrix = np.reshape(tensor_environment.data, (self.bond_inner *
                                                              self.bond_inner * self.bond_inner * self.bond_inner, self.bond_label))
                u, sigma, vt = la.svd(matrix, 0)
                self.tn_layers[c_i][c_j][c_k].data = np.reshape(
                    np.dot(u, vt), (self.bond_inner, self.bond_inner, self.bond_inner, self.bond_inner, self.bond_label))
            else:
                matrix = np.reshape(tensor_environment.data, (self.bond_inner *
                                                              self.bond_inner * self.bond_inner * self.bond_inner, self.bond_inner))
                u, sigma, vt = la.svd(matrix, 0)
                self.tn_layers[c_i][c_j][c_k].data = np.reshape(
                    np.dot(u, vt), (self.bond_inner, self.bond_inner, self.bond_inner, self.bond_inner, self.bond_inner))

        # compute the training accuracy-------------------------------------------
        j = c_j
        k = c_k
        # Update the tensors at the corresponding positions from the current layer to the last layer
        for i in range(c_i, 5):
            self.contracted[i][j][k] = self.contract_unit(self.tn_layers[i][j][k],
                                                          self.contracted[i -
                                                                          1][2 * j][2 * k],
                                                          self.contracted[i -
                                                                          1][2 * j][2 * k + 1],
                                                          self.contracted[i -
                                                                          1][2 * j + 1][2 * k],
                                                          self.contracted[i - 1][2 * j + 1][2 * k + 1], self.n_train)
            j = j // 2
            k = k // 2

        temp = tn.contract(self.contracted[4][0][0], self.labels, "up", "down")
        # Take the trace of the matrix
        temp.trace("up", "down")
        acc = temp.data / self.n_train
        return acc
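The tempD loop in the environment computation above contracts the top tensor with the label tensor element by element; the same contraction can be written as a single einsum. A numpy sketch with assumed dimensions, checking that the two agree:

import numpy as np

bond_label, bond_inner, n_train = 2, 4, 10
top = np.random.rand(bond_label, bond_inner, n_train)   # plays the role of contracted[4][0][0].data, indices (f, m, g)
labels = np.random.rand(n_train, bond_label)            # plays the role of labels.data, indices (g, f)

# Loop version, as in the example above
tempD_loop = np.zeros((bond_inner, n_train))
for m in range(bond_inner):
    for g in range(n_train):
        tempD_loop[m, g] = sum(top[f, m, g] * labels[g, f] for f in range(bond_label))

# Equivalent single contraction
tempD = np.einsum('fmg,gf->mg', top, labels)
print(np.allclose(tempD, tempD_loop))   # True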
Example #14
def con(*args):
    """
    Contract a network of tensors. Similar purpose to NCON, described in
    arxiv.org/abs/1402.0939, but designed to work with the Tensor objects of
    tncontract.

    Examples
    --------

    >>> import tncontract as tn

    For the examples below, we define three tensors

    >>> A = tn.Tensor(np.random.rand(3,2,4), labels=["a", "b", "c"])
    >>> B = tn.Tensor(np.random.rand(3,4), labels=["d", "e"])
    >>> C = tn.Tensor(np.random.rand(5,5,2), labels=["f", "g", "h"])

    Contract a pair of indices between two tensors
    ----------------------------------------------
    The following contracts pairs of indices "a","d" and "c","e" of tensors
    `A` and `B`. It is identical to A["a", "c"]*B["d", "e"].

    >>> tn.con(A, B, ("a", "d" ), ("c", "e")) 
    Tensor object: shape = (2), labels = ["b"]

    Contract a pair of indices belonging to one tensor (internal edges)
    ------------------------------------------------------------------
    The following contracts the "f" and "g" indices of tensor `C`

    >>> tn.con(C, ("f", "g"))
    Tensor object: shape = (2), labels = ["h"]

    Return the tensor product of a pair of tensors
    ----------------------------------------------
    After all indices have been contracted, `con` will return the tensor
    product of the disconnected components of the tensor contraction. The
    following example returns the tensor product of `A` and `B`. 

    >>> tn.con(A, B) 
    Tensor object: shape = (3, 2, 4, 3, 4), labels = ["a", "b", "c", "d", "e"]

    Contract a network of several tensors
    -------------------------------------
    It is possible to contract a network of several tensors. Internal edges are
    contracted first then edges connecting separate tensors, and then the
    tensor product is taken of the disconnected components resulting from the
    contraction. Edges between separate tensors are contracted in the order
    they appear in the argument list. The result of the example below is a
    scalar (since all indices will be contracted). 

    >>> tn.con(A, B, C, ("a", "d" ), ("c", "e"), ("f", "g"), ("h", "b"))  

    Notes
    -----
    Lists of tensors and index pairs for contraction may be used as arguments. 
    The following example contracts 100 rank 2 tensors in a ring with periodic
    boundary conditions. 
    >>> N=100
    >>> A = tn.Tensor(np.random.rand(2,2), labels=["left","right"])
    >>> tensor_list = [A.suf(str(i)) for i in range(N)]
    >>> idx_pairs = [("right"+str(j), "left"+str(j+1)) for j in range(N-1)]
    >>> tn.con(tensor_list, idx_pairs, ("right"+str(N-1), "left0"))
    """

    tensor_list = []
    contract_list = []
    for x in args:
        #Can take lists of tensors/contraction pairs as arguments
        if isinstance(x, list):
            if isinstance(x[0], tn.Tensor):
                tensor_list.extend(x)
            else:
                contract_list.extend(x)

        elif isinstance(x, tn.Tensor):
            tensor_list.append(x)
        else:
            contract_list.append(x)

    tensor_list = [t.copy() for t in tensor_list]  #Unlink from memory
    all_tensor_indices = [t.labels for t in tensor_list]

    #Check that no index is specified in more than one contraction pair
    contracted_indices = [item for pair in contract_list for item in pair]
    if len(set(contracted_indices)) != len(contracted_indices):
        raise ValueError("Index found in more than one contraction pair.")

    index_lookup = {}
    for i, labels in enumerate(all_tensor_indices):
        for lab in labels:
            if lab in index_lookup.keys():
                raise ValueError("Index label " + lab +
                                 " found in two tensors." +
                                 " Tensors must have unique index labelling.")
            index_lookup[lab] = i

    internal_contract = []  #Indices contracted within the same tensor
    pairwise_contract = []  #Indices contracted between different tensors
    tensor_pairs = []
    tensors_involved = set()
    for c in contract_list:
        if index_lookup[c[0]] == index_lookup[c[1]]:
            internal_contract.append(c)
        else:
            #Takes into account case where multiple indices from a pair of
            #tensors are contracted (will contract in one call to np.dot)
            #TODO: Better to flatten first?
            if (tuple(np.sort((index_lookup[c[0]], index_lookup[c[1]])))
                    in tensor_pairs):
                #Look up the sorted pair, matching how entries are stored in tensor_pairs
                idx = tensor_pairs.index(
                    tuple(np.sort((index_lookup[c[0]], index_lookup[c[1]]))))
                if not isinstance(pairwise_contract[idx][0], list):
                    pairwise_contract[idx][0] = [pairwise_contract[idx][0]]
                    pairwise_contract[idx][1] = [pairwise_contract[idx][1]]
                pairwise_contract[idx][0].append(c[0])
                pairwise_contract[idx][1].append(c[1])
            else:
                pairwise_contract.append(list(c))
                tensor_pairs.append(
                    tuple(np.sort((index_lookup[c[0]], index_lookup[c[1]]))))
                tensors_involved.add(index_lookup[c[0]])
                tensors_involved.add(index_lookup[c[1]])

    #Contract all internal indices
    for c in internal_contract:
        tensor_list[index_lookup[c[0]]].trace(c[0], c[1])

    #Contract pairs of tensors
    connected_component = [i for i in range(len(tensor_list))]
    for c in pairwise_contract:

        if isinstance(c[0], list):
            #Case where multiple indices of two tensors contracted
            d = index_lookup[c[0][0]]
            e = index_lookup[c[1][0]]
        else:
            d = index_lookup[c[0]]
            e = index_lookup[c[1]]

        if d == e:
            tensor_list[d].trace(c[0], c[1])
        else:
            if d < e:
                tensor_list[d] = tn.contract(tensor_list[d], tensor_list[e],
                                             c[0], c[1])
                connected_component[e] = d
            else:
                tensor_list[e] = tn.contract(tensor_list[e], tensor_list[d],
                                             c[1], c[0])
                connected_component[d] = e
            #Tensors in index_lookup refer to the first tensor
            #in which the label appears in the list
            for lab in tensor_list[min(d, e)].labels:
                index_lookup[lab] = min(d, e)

    #Take the tensor product of all the disconnected components
    return tn.tensor_product(*[
        tensor_list[connected_component.index(x)]
        for x in set(connected_component)
    ])