Example #1
0
def tucker_als(dim, size, rank, num_iter, input_val=None):
    """Run Tucker ALS for `num_iter` sweeps and return the final factors.

    Args:
        dim: number of tensor modes.
        size: dimension size of each mode.
        rank: Tucker rank used for the factor matrices and core.
        num_iter: number of full ALS sweeps to run.
        input_val: optional (A_val_list, core_val, X_val) triple used as the
            starting point; when omitted/empty, a random initialization is
            drawn via `init_rand_tucker`. (Previously defaulted to a mutable
            `[]`; `None` is backward-compatible since empty input still
            triggers random initialization.)

    Returns:
        (A_val_list, core_val, X_val): updated factor matrices, core tensor,
        and the (unchanged) input tensor values.
    """
    tg, executors_update, executor_loss, intermediates = tucker_als_graph(
        dim, size, rank)

    # `not input_val` covers both the None default and an explicit [].
    if not input_val:
        A_val_list, core_val, X_val = init_rand_tucker(dim, size, rank)
    else:
        # Deep-copy so the caller's initial values are not mutated in place.
        A_val_list, core_val, X_val = copy.deepcopy(input_val)

    for iteration in range(num_iter):  # renamed: `iter` shadowed the builtin
        # ALS sweep: update each mode's factor in turn.
        for i in range(dim):

            feed_dict = dict(zip(tg.A_list, A_val_list))
            feed_dict.update({tg.core: core_val, tg.X: X_val})

            new_core_A_val, = executors_update[i].run(feed_dict=feed_dict)

            # Update core_val and A_val_list[i] using SVD.
            core_val, A_val_list[i] = n_mode_eigendec(intermediates[i],
                                                      new_core_A_val, rank)

        feed_dict = dict(zip(tg.A_list, A_val_list))
        feed_dict.update({tg.core: core_val, tg.X: X_val})
        loss_val, = executor_loss.run(feed_dict=feed_dict)

        print(f'At iteration {iteration} the loss is: {loss_val}')

    return A_val_list, core_val, X_val
def run_als_tucker(args, tenpy, csv_file):
    """Build/load the target tensor selected by `args` and run Tucker ALS.

    Args:
        args: parsed CLI namespace (tensor selection, ranks, iterations, ...).
        tenpy: tensor backend wrapper providing random/load/printf.
        csv_file: open file handle passed through to `Tucker_ALS` for logging.

    Returns:
        (ret_list, num_iters_map, time_map, pp_init_iter); the last three are
        always None for the Tucker path.

    Raises:
        ValueError: if `args.tensor` names an unknown tensor type.
    """
    # BUG FIX: `is not ''` compared object identity, not string equality
    # (and raises a SyntaxWarning on modern CPython); use != instead.
    if args.load_tensor != '':
        T = tenpy.load_tensor_from_file(args.load_tensor + 'tensor.npy')
    elif args.tensor == "random":
        tenpy.printf("Testing random tensor")
        T = synthetic_tensors.init_rand_tucker(tenpy, args.rank_ratio,
                                               args.hosvd_core_dim, args.order,
                                               args.s, args.seed)
    elif args.tensor == "random_bias":
        tenpy.printf("Testing biased random tensor")
        T = synthetic_tensors.init_rand_bias_tucker(tenpy, args.rank_ratio,
                                                    args.hosvd_core_dim,
                                                    args.order, args.s,
                                                    args.seed)
    elif args.tensor == "random_col":
        T = synthetic_tensors.init_const_collinearity_tensor(
            tenpy, args.s, args.order, args.R, args.col, args.seed)
    elif args.tensor == "amino":
        T = real_tensors.amino_acids(tenpy)
    elif args.tensor == "coil100":
        T = real_tensors.coil_100(tenpy)
    elif args.tensor == "timelapse":
        T = real_tensors.time_lapse_images(tenpy)
    elif args.tensor == "scf":
        T = real_tensors.get_scf_tensor(tenpy)
    else:
        # Previously an unknown value fell through to an UnboundLocalError
        # on T below; fail with an explicit message instead.
        raise ValueError(f"Unknown tensor type: {args.tensor}")

    tenpy.printf("The shape of the input tensor is: ", T.shape)
    Regu = args.regularization

    if args.load_tensor != '':
        # BUG FIX: the path template was missing its f-prefix, so the literal
        # text "{args.load_tensor}mat{i}.npy" was passed to the loader.
        A = [
            tenpy.load_tensor_from_file(f"{args.load_tensor}mat{i}.npy")
            for i in range(T.ndim)
        ]
    elif args.hosvd != 0:
        from tucker.common_kernels import hosvd, rrf
        if args.hosvd == 1:
            A = hosvd(tenpy, T, args.hosvd_core_dim, compute_core=False)
        elif args.hosvd == 2:
            A = rrf(tenpy, T, args.hosvd_core_dim, epsilon=args.epsilon)
        elif args.hosvd == 3:
            A = rrf(tenpy,
                    T,
                    args.hosvd_core_dim,
                    epsilon=args.epsilon,
                    countsketch=True)
    else:
        A = [
            tenpy.random((args.hosvd_core_dim[i], T.shape[i]))
            for i in range(T.ndim)
        ]

    ret_list = Tucker_ALS(tenpy, A, T, args.num_iter, csv_file, Regu,
                          args.method, args, args.res_calc_freq)
    num_iters_map, time_map, pp_init_iter = None, None, None

    if args.backend == "ctf":
        # NOTE(review): `tepoch` is not defined anywhere in this file —
        # presumably a module-level CTF timer epoch; confirm it exists.
        tepoch.end()
    return ret_list, num_iters_map, time_map, pp_init_iter
Example #3
0
def test_tucker_als_shared_exec(backendopt):
    """Check one shared-executor Tucker ALS sweep against a manual sweep.

    Runs `tucker_als_shared_exec` for a single iteration and recomputes the
    expected mode updates by hand (TTMc + SVD per mode), then compares.
    """
    def leading_eigvecs(gram):
        # Leading `rank` orthonormal columns from the SVD of the Gram matrix.
        u, _, _ = T.svd(gram)
        return u[:, :rank]

    for backend in backendopt:
        T.set_backend(backend)

        input_val = init_rand_tucker(dim, size, rank)
        A_val_list, _, X_val = input_val

        A_val_list_ad, core_val_ad, _ = tucker_als_shared_exec(
            dim, size, rank, 1, input_val)

        A1_val, A2_val, A3_val = A_val_list

        # Expected values, one mode at a time.
        # ttmc: tensor times matrix chain.
        ttmc = T.einsum("abc,bk,cl->akl", X_val, A2_val, A3_val)
        A1_val = leading_eigvecs(T.einsum("akl,bkl->ab", ttmc, ttmc))

        ttmc = T.einsum("abc,ak,cl->kbl", X_val, A1_val, A3_val)
        A2_val = leading_eigvecs(T.einsum("kbl,kcl->bc", ttmc, ttmc))

        ttmc = T.einsum("abc,ak,bl->klc", X_val, A1_val, A2_val)
        A3_val = leading_eigvecs(T.einsum("klc,kld->cd", ttmc, ttmc))

        core_val = T.einsum("abc,ak,bl,cm->klm", X_val, A1_val, A2_val, A3_val)

        for computed, expected in zip(A_val_list_ad,
                                      (A1_val, A2_val, A3_val)):
            assert T.norm(computed - expected) < 1e-8
        assert T.norm(core_val_ad - core_val) < 1e-8
Example #4
0
def test_tucker(backendopt):
    """Verify the TuckerGraph residual against a direct einsum computation."""
    for backend in backendopt:
        T.set_backend(backend)

        tg = TuckerGraph(dim, size, rank)
        executor = ad.Executor([tg.residual])

        A_val_list, core_val, X_val = init_rand_tucker(dim, size, rank)

        # Bind factor matrices, core, and input tensor to their graph nodes.
        inputs = dict(zip(tg.A_list, A_val_list))
        inputs[tg.core] = core_val
        inputs[tg.X] = X_val

        residual_val, = executor.run(feed_dict=inputs)

        # Reconstruction minus the input tensor is the expected residual.
        expect_residual_val = T.einsum('ae,bf,cg,efg->abc', *A_val_list,
                                       core_val) - X_val

        assert T.norm(residual_val - expect_residual_val) < 1e-8
Example #5
0
def tucker_als_shared_exec(dim, size, rank, num_iter, input_val=None):
    """Run Tucker ALS with a shared executor across the per-mode updates.

    Unlike `tucker_als`, a single update executor is reused for all modes,
    resetting the graph only on the first mode and evicting stale factor
    inputs afterwards.

    Args:
        dim: number of tensor modes.
        size: dimension size of each mode.
        rank: Tucker rank used for the factor matrices and core.
        num_iter: number of full ALS sweeps to run.
        input_val: optional (A_val_list, core_val, X_val) starting point;
            when omitted/empty, a random initialization is drawn.
            (Previously defaulted to a mutable `[]`; `None` is
            backward-compatible since empty input still triggers random
            initialization.)

    Returns:
        (A_val_list, core_val, X_val): updated factors, core, and the
        (unchanged) input tensor values.
    """
    tg, executor_updates, executor_loss, loss, updates, intermediates = tucker_als_graph_shared_exec(
        dim, size, rank)

    # `not input_val` covers both the None default and an explicit [].
    if not input_val:
        A_val_list, core_val, X_val = init_rand_tucker(dim, size, rank)
    else:
        # Deep-copy so the caller's initial values are not mutated in place.
        A_val_list, core_val, X_val = copy.deepcopy(input_val)

    for iteration in range(num_iter):  # renamed: `iter` shadowed the builtin
        # ALS sweep: update each mode's factor in turn.
        for i in range(dim):

            feed_dict = dict(zip(tg.A_list, A_val_list))
            feed_dict.update({tg.core: core_val, tg.X: X_val})

            if i == 0:
                # First mode of the sweep: full graph reset.
                new_core_A_val, = executor_updates.run(feed_dict=feed_dict,
                                                       out_nodes=[updates[0]])
            else:
                # Later modes reuse the graph, evicting the factors already
                # updated in this sweep so stale cached values are not used.
                new_core_A_val, = executor_updates.run(
                    feed_dict=feed_dict,
                    out_nodes=[updates[i]],
                    reset_graph=False,
                    evicted_inputs=tg.A_list[:i])

            # Update core_val and A_val_list[i] using SVD.
            core_val, A_val_list[i] = n_mode_eigendec(intermediates[i],
                                                      new_core_A_val, rank)

        feed_dict = dict(zip(tg.A_list, A_val_list))
        feed_dict.update({tg.core: core_val, tg.X: X_val})
        loss_val, = executor_loss.run(feed_dict=feed_dict)

        print(f'At iteration {iteration} the loss is: {loss_val}')

    return A_val_list, core_val, X_val
Example #6
0
def test_tucker_als_shared_exec(benchmark):
    """Benchmark 10 shared-executor Tucker ALS iterations per backend.

    NOTE(review): `datatype` is never applied (no `T.set_backend(datatype)`
    as in the sibling tests) — confirm whether the backend switch is missing.
    """
    for datatype in BACKEND_TYPES:
        input_val = init_rand_tucker(dim, size, rank)
        # Result is discarded (the removed `outputs` local was unused);
        # only the timing collected by `benchmark` matters.
        benchmark(tucker_als_shared_exec, dim, size, rank, 10, input_val)
def run_als_cpd(args, tenpy, csv_file):
    """Build/load the target tensor selected by `args` and run CP ALS.

    When `args.hosvd` is set, a Tucker-compressed warm start is computed
    first: 100 DT iterations on the compressed tensor, whose factors are
    lifted back to full size and used to initialize the main CP_ALS run.

    Args:
        args: parsed CLI namespace (tensor selection, ranks, iterations, ...).
        tenpy: tensor backend wrapper providing random/load/printf.
        csv_file: open file handle passed through to `CP_ALS` for logging.

    Returns:
        (ret_list, num_iters_map, time_map, pp_init_iter) from the final
        CP_ALS call.

    Raises:
        ValueError: if `args.tensor` names an unknown tensor type.
    """
    # BUG FIX: `is not ''` compared object identity, not string equality
    # (and raises a SyntaxWarning on modern CPython); use != instead.
    if args.load_tensor != '':
        T = tenpy.load_tensor_from_file(args.load_tensor + 'tensor.npy')
    elif args.tensor == "random":
        tenpy.printf("Testing random tensor")
        sizes = [args.s] * args.order
        T = synthetic_tensors.init_rand(tenpy, args.order, sizes,
                                        int(args.R * args.rank_ratio),
                                        args.seed)
    elif args.tensor == "random_bias":
        tenpy.printf("Testing biased random tensor")
        sizes = [args.s] * args.order
        T = synthetic_tensors.init_rand_bias(tenpy, args.order, sizes, args.R,
                                             args.seed)
    elif args.tensor == "random_tucker":
        tenpy.printf("Testing random tucker tensor")
        T = synthetic_tensors.init_rand_tucker(tenpy, args.rank_ratio,
                                               args.hosvd_core_dim, args.order,
                                               args.s, args.seed)
    elif args.tensor == "random_col":
        T = synthetic_tensors.init_const_collinearity_tensor(
            tenpy, args.s, args.order, args.R, args.col, args.seed)
    elif args.tensor == "amino":
        T = real_tensors.amino_acids(tenpy)
    elif args.tensor == "coil100":
        T = real_tensors.coil_100(tenpy)
    elif args.tensor == "timelapse":
        T = real_tensors.time_lapse_images(tenpy)
    elif args.tensor == "scf":
        T = real_tensors.get_scf_tensor(tenpy)
    elif args.tensor == "graph":
        T = real_tensors.graph_state_5_party(tenpy)
    else:
        # Previously an unknown value fell through to an UnboundLocalError
        # on T below; fail with an explicit message instead.
        raise ValueError(f"Unknown tensor type: {args.tensor}")

    tenpy.printf("The shape of the input tensor is: ", T.shape)
    Regu = args.regularization

    if args.load_tensor != '':
        # BUG FIX: the path template was missing its f-prefix, so the literal
        # text "{args.load_tensor}mat{i}.npy" was passed to the loader.
        A = [
            tenpy.load_tensor_from_file(f"{args.load_tensor}mat{i}.npy")
            for i in range(T.ndim)
        ]
    elif args.hosvd != 0:
        A = [
            tenpy.random((args.R, args.hosvd_core_dim[i]))
            for i in range(T.ndim)
        ]
    else:
        A = [tenpy.random((args.R, T.shape[i])) for i in range(T.ndim)]

    if args.hosvd:
        from tucker.common_kernels import hosvd
        transformer, compressed_T = hosvd(tenpy,
                                          T,
                                          args.hosvd_core_dim,
                                          compute_core=True)
        # Warm start: results of this run are intentionally discarded;
        # only the factors A matter (they are lifted to full size below).
        ret_list, num_iters_map, time_map, pp_init_iter = CP_ALS(
            tenpy, A, compressed_T, 100, csv_file, Regu, 'DT', args,
            args.res_calc_freq)
        A_fullsize = [tenpy.dot(transformer[i], A[i]) for i in range(T.ndim)]
        ret_list, num_iters_map, time_map, pp_init_iter = CP_ALS(
            tenpy, A_fullsize, T, args.num_iter, csv_file, Regu, args.method,
            args, args.res_calc_freq)
    else:
        ret_list, num_iters_map, time_map, pp_init_iter = CP_ALS(
            tenpy, A, T, args.num_iter, csv_file, Regu, args.method, args,
            args.res_calc_freq)

    if args.backend == "ctf":
        # NOTE(review): `tepoch` is not defined anywhere in this file —
        # presumably a module-level CTF timer epoch; confirm it exists.
        tepoch.end()
    return ret_list, num_iters_map, time_map, pp_init_iter