def Tucker_ALS(tenpy,
               A,
               T,
               O,
               num_iter,
               csv_file=None,
               Regu=None,
               method='DT',
               args=None,
               res_calc_freq=1):

    from Tucker.common_kernels import get_residual
    from Tucker.standard_ALS import Tucker_DTALS_Optimizer, Tucker_PPALS_Optimizer

    flag_dt = True

    if csv_file is not None:
        csv_writer = csv.writer(csv_file,
                                delimiter=',',
                                quotechar='|',
                                quoting=csv.QUOTE_MINIMAL)

    time_all = 0.
    optimizer_list = {
        'DT': Tucker_DTALS_Optimizer(tenpy, T, A),
        'PP': Tucker_PPALS_Optimizer(tenpy, T, A, args),
    }
    optimizer = optimizer_list[method]

    normT = tenpy.vecnorm(T)

    for i in range(num_iter):
        if i % res_calc_freq == 0 or i == num_iter - 1 or not flag_dt:
            res = get_residual(tenpy, T, optimizer.A)
            fitness = 1 - res / normT

            if tenpy.is_master_proc():
                print("[", i, "] Residual is", res, "fitness is: ", fitness)
                # write to csv file
                if csv_file is not None:
                    csv_writer.writerow([i, time_all, res, fitness, flag_dt])
                    csv_file.flush()
        t0 = time.time()
        if method == 'PP':
            A, pp_restart = optimizer.step(Regu)
            flag_dt = not pp_restart
        else:
            A = optimizer.step(Regu)
        t1 = time.time()
        tenpy.printf("Sweep took", t1 - t0, "seconds")
        time_all += t1 - t0
    tenpy.printf("Naive method took", time_all, "seconds overall")

    if args is not None and args.save_tensor:
        folderpath = join(results_dir, arg_defs.get_file_prefix(args))
        save_decomposition_results(T, A, tenpy, folderpath)

    return A, res
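
# Usage sketch (illustrative, not from the source): this assumes the numpy backend
# in backend.numpy_ext operates on plain numpy ndarrays, that A is a list of factor
# matrices of shape (T.shape[n], rank), and that the eagerly built 'PP' optimizer
# tolerates args=None; otherwise pass the parsed args namespace instead. The helper
# name below is illustrative.
def _example_tucker_als_run():
    import numpy as np
    import backend.numpy_ext as tenpy

    shape, rank = (20, 20, 20), 5
    T = np.random.rand(*shape)                    # dense target tensor
    O = np.ones(shape)                            # placeholder; unused by the 'DT' path
    A = [np.random.rand(s, rank) for s in shape]  # initial factor matrices
    return Tucker_ALS(tenpy, A, T, O, num_iter=10, Regu=1e-6, method='DT')
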
def run_als(args):
    # Set up CSV logging
    csv_path = join(results_dir, arg_defs.get_file_prefix(args) + '.csv')
    is_new_log = not Path(csv_path).exists()
    csv_file = open(csv_path, 'a')
    csv_writer = csv.writer(csv_file,
                            delimiter=',',
                            quotechar='|',
                            quoting=csv.QUOTE_MINIMAL)

    profiler.do_profile(args.profile)

    if args.backend == "numpy":
        import backend.numpy_ext as tenpy
    elif args.backend == "ctf":
        import backend.ctf_ext as tenpy
        import ctf
        tepoch = ctf.timer_epoch("ALS")
        tepoch.begin()
    else:
        raise ValueError("Unknown backend: " + args.backend)

    if tenpy.is_master_proc():
        for arg in vars(args):
            print(arg + ':', getattr(args, arg))
        if is_new_log:
            csv_writer.writerow([
                'iterations', 'time', 'residual', 'fitness', 'flag_dt',
                'fitness_diff'
            ])

    tenpy.seed(args.seed)
    if args.decomposition == "CP":
        return run_als_cpd(args, tenpy, csv_file)
    elif args.decomposition == "Tucker":
        return run_als_tucker(args, tenpy, csv_file)
    elif args.decomposition == "Tucker_simulate":
        return run_als_tucker_simulate(args, tenpy, csv_file)
    elif args.decomposition == "CP_simulate":
        return run_als_cp_simulate(args, tenpy, csv_file)
    else:
        raise ValueError("Unknown decomposition: " + args.decomposition)
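
# Sketch for reading the log written above (the column order matches the header
# written in run_als: iterations, time, residual, fitness, flag_dt, fitness_diff;
# the delimiter and quote character match the writer settings). The helper name
# and path argument are illustrative.
def _read_als_log(csv_path):
    import csv
    with open(csv_path, newline='') as f:
        return [row for row in csv.reader(f, delimiter=',', quotechar='|')]
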
def CP_ALS(tenpy,
           A,
           T,
           num_iter,
           csv_file=None,
           Regu=0.,
           method='DT',
           args=None,
           res_calc_freq=1):

    ret_list = []

    from cpd.common_kernels import get_residual, get_residual_naive
    from cpd.als import CP_DTALS_Optimizer, CP_leverage_Optimizer
    from cpd.als import CP_PPALS_Optimizer, CP_PPsimulate_Optimizer, CP_partialPPALS_Optimizer

    flag_dt = True

    if csv_file is not None:
        csv_writer = csv.writer(csv_file,
                                delimiter=',',
                                quotechar='|',
                                quoting=csv.QUOTE_MINIMAL)

    iters = 0
    normT = tenpy.vecnorm(T)

    time_all = 0.
    if args is None:
        optimizer = CP_DTALS_Optimizer(tenpy, T, A)
    else:
        optimizer_list = {
            'DT': CP_DTALS_Optimizer(tenpy, T, A),
            'PP': CP_PPALS_Optimizer(tenpy, T, A, args),
            'PPsimulate': CP_PPsimulate_Optimizer(tenpy, T, A, args),
            'partialPP': CP_partialPPALS_Optimizer(tenpy, T, A, args),
            'Leverage': CP_leverage_Optimizer(tenpy, T, A, args),
        }
        optimizer = optimizer_list[method]

    fitness_old = 0
    for i in range(num_iter):

        t0 = time.time()
        if method == 'PP':
            A, pp_restart = optimizer.step(Regu)
            flag_dt = not pp_restart
        else:
            A = optimizer.step(Regu)
        t1 = time.time()
        tenpy.printf(f"[ {i} ] Sweep took {t1 - t0} seconds")
        time_all += t1 - t0

        if i % res_calc_freq == 0 or i == num_iter - 1 or not flag_dt:
            if method == 'Leverage':
                res = get_residual_naive(tenpy, T, A)
            else:
                res = get_residual(tenpy, optimizer.mttkrp_last_mode, A, normT)
            fitness = 1 - res / normT
            fitness_diff = abs(fitness - fitness_old)
            fitness_old = fitness

            if tenpy.is_master_proc():
                ret_list.append(
                    [i, time_all, res, fitness, flag_dt, fitness_diff])
                tenpy.printf(
                    f"[ {i} ] Residual is {res}, fitness is: {fitness}, fitness diff is: {fitness_diff}, timeall is: {time_all}"
                )
                if csv_file is not None:
                    csv_writer.writerow(
                        [i, time_all, res, fitness, flag_dt, fitness_diff])
                    csv_file.flush()
            # stop early once the fitness improvement per residual check drops below the tolerance
            if args is not None and i % res_calc_freq == 0:
                if abs(fitness_diff) <= args.stopping_tol * res_calc_freq:
                    tenpy.printf(
                        f"{method} method took {time_all} seconds overall")
                    return ret_list, optimizer.num_iters_map, optimizer.time_map, optimizer.pp_init_iter

    tenpy.printf(f"{method} method took {time_all} seconds overall")

    if args is not None and args.save_tensor:
        folderpath = join(results_dir, arg_defs.get_file_prefix(args))
        save_decomposition_results(T, A, tenpy, folderpath)

    return ret_list, optimizer.num_iters_map, optimizer.time_map, optimizer.pp_init_iter
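
# Sketch of the stopping rule used in CP_ALS above: the run halts once the change
# in fitness between consecutive residual checks drops below stopping_tol scaled by
# the check frequency. The helper name is illustrative.
def _should_stop(fitness, fitness_old, stopping_tol, res_calc_freq):
    """Return True when the per-check fitness change falls below the tolerance."""
    return abs(fitness - fitness_old) <= stopping_tol * res_calc_freq

# e.g. _should_stop(0.95, 0.95, stopping_tol=1e-5, res_calc_freq=10) returns True,
# while _should_stop(0.95, 0.90, stopping_tol=1e-5, res_calc_freq=10) returns False.
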
def Tucker_ALS(tenpy,
               A,
               T,
               num_iter,
               csv_file=None,
               Regu=0.,
               method='DT',
               args=None,
               res_calc_freq=1):

    from tucker.common_kernels import get_residual
    from tucker.als import Tucker_DTALS_Optimizer, Tucker_PPALS_Optimizer
    from tucker.als import Tucker_leverage_Optimizer, Tucker_countsketch_Optimizer, Tucker_countsketch_su_Optimizer

    flag_dt = True
    ret_list = []

    if csv_file is not None:
        csv_writer = csv.writer(csv_file,
                                delimiter=',',
                                quotechar='|',
                                quoting=csv.QUOTE_MINIMAL)

    time_all = 0.
    optimizer_list = {
        'DT': Tucker_DTALS_Optimizer(tenpy, T, A),
        'PP': Tucker_PPALS_Optimizer(tenpy, T, A, args),
        'Leverage': Tucker_leverage_Optimizer(tenpy, T, A, args),
        'Countsketch': Tucker_countsketch_Optimizer(tenpy, T, A, args),
        'Countsketch-su': Tucker_countsketch_su_Optimizer(tenpy, T, A, args)
    }
    optimizer = optimizer_list[method]

    normT = tenpy.vecnorm(T)
    fitness_old = 0.
    fitness_list = []
    for i in range(num_iter):
        if i % res_calc_freq == 0 or i == num_iter - 1 or not flag_dt:
            if args.save_tensor:
                folderpath = join(results_dir, arg_defs.get_file_prefix(args))
                save_decomposition_results(T, A, tenpy, folderpath)
            if method in ['DT', 'PP']:
                res = get_residual(tenpy, T, A)
            elif method in ['Leverage', 'Countsketch', 'Countsketch-su']:
                res = get_residual(tenpy, T, A, optimizer.core)
            fitness = 1 - res / normT
            d_fit = abs(fitness - fitness_old)
            fitness_old = fitness

            if tenpy.is_master_proc():
                print(
                    f"[ {i} ] Residual is {res}, fitness is: {fitness}, d_fit is: {d_fit}, core_norm is: {tenpy.vecnorm(optimizer.core)}"
                )
                ret_list.append([i, res, fitness, d_fit])
                if csv_file is not None:
                    csv_writer.writerow(
                        [i, time_all, res, fitness, flag_dt, d_fit])
                    csv_file.flush()
        t0 = time.time()
        if method == 'PP':
            A, pp_restart = optimizer.step(Regu)
            flag_dt = not pp_restart
        else:
            A = optimizer.step(Regu)
        t1 = time.time()
        tenpy.printf(f"[ {i} ] Sweep took {t1 - t0} seconds")
        time_all += t1 - t0
    tenpy.printf(f"{method} method took {time_all} seconds overall")

    return ret_list
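
# Sketch: each row of the ret_list returned above is [iteration, residual, fitness,
# d_fit], as appended in the sweep loop; a small helper (name illustrative) to
# report the last recorded check.
def _summarize_tucker_run(ret_list):
    if not ret_list:
        return None
    i, res, fitness, d_fit = ret_list[-1]
    print(f"final check at sweep {i}: residual={res}, fitness={fitness}, d_fit={d_fit}")
    return fitness
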
def CP_NLS(tenpy,
           A,
           T,
           O,
           num_iter,
           sp_res,
           csv_file=None,
           Regu=None,
           method='NLS',
           args=None,
           res_calc_freq=1,
           nls_tol=1e-05,
           cg_tol=1e-03,
           grad_tol=0.1,
           num=1,
           switch_tol=0.1,
           nls_iter=2,
           als_iter=30,
           maxiter=0,
           varying=True,
           fact=2,
           lower=1e-06,
           upper=1,
           diag=1,
           Arm=0,
           c=1e-04,
           tau=0.75,
           arm_iters=20):

    from CPD.common_kernels import get_residual, get_residual_sp
    from CPD.NLS import CP_fastNLS_Optimizer

    if csv_file is not None:
        csv_writer = csv.writer(csv_file,
                                delimiter=',',
                                quotechar='|',
                                quoting=csv.QUOTE_MINIMAL)

    if Regu is None:
        Regu = 0

    if varying:
        decrease = True
        increase = False

    iters = 0
    count = 0

    normT = tenpy.vecnorm(T)

    if maxiter == 0:
        # default CG iteration cap scales with the tensor dimensions and the CP rank,
        # inferred here from the column count of the first factor matrix
        maxiter = sum(T.shape) * A[0].shape[1]

    time_all = 0.
    if method == 'DT':
        method = 'NLS'
        optimizer = CP_fastNLS_Optimizer(tenpy, T, A, maxiter, cg_tol, num, diag,
                                         Arm, c, tau, arm_iters, args)
    else:
        optimizer_list = {
            'NLS': CP_fastNLS_Optimizer(tenpy, T, A, maxiter, cg_tol, num, diag,
                                        Arm, c, tau, arm_iters, args)
        }
        optimizer = optimizer_list[method]

    fitness_old = 0
    prev_res = np.finfo(np.float32).max
    for i in range(num_iter):

        if i % res_calc_freq == 0 or i == num_iter - 1:
            if sp_res:
                res = get_residual_sp(tenpy, O, T, A)
            else:
                res = get_residual(tenpy, T, A)
            fitness = 1 - res / normT

            if tenpy.is_master_proc():
                print("[", i, "] Residual is", res, "fitness is: ", fitness)
                # write to csv file
                if csv_file is not None:
                    if method == 'NLS':
                        csv_writer.writerow([iters, time_all, res, fitness])
                    else:
                        csv_writer.writerow([i, time_all, res, fitness])
                    csv_file.flush()

        if res < nls_tol:
            tenpy.printf('Method converged due to residual tolerance in', i,
                         'iterations')
            break
        t0 = time.time()

        if method == 'NLS':
            A, iters, flag = optimizer.step(Regu)
        else:
            A = optimizer.step(Regu)
        count += 1

        t1 = time.time()
        tenpy.printf("[", i, "] Sweep took", t1 - t0, "seconds")

        time_all += t1 - t0

        if method == 'NLS':
            if optimizer.g_norm < grad_tol:
                tenpy.printf('Method converged due to gradient tolerance in', i,
                             'iterations')
                break

        #fitness_old = fitness

        # steer the regularization between the lower and upper bounds
        if varying:
            if Regu < lower:
                increase = True
                decrease = False

            if Regu > upper:
                decrease = True
                increase = False

            if increase:
                Regu = Regu * fact

            elif decrease:
                Regu = Regu / fact
    tenpy.printf(method+" method took",time_all,"seconds overall")
    
    

    if args.save_tensor:
        folderpath = join(results_dir, arg_defs.get_file_prefix(args))
        save_decomposition_results(T,A,tenpy,folderpath)

    return A
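
# Standalone sketch of the varying-regularization schedule in CP_NLS above: Regu is
# divided by `fact` each sweep until it drops below `lower`, then multiplied by
# `fact` until it exceeds `upper`, so it oscillates between the two bounds. The
# helper name is illustrative.
def _next_regu(Regu, increase, decrease, lower=1e-06, upper=1, fact=2):
    """Return the updated (Regu, increase, decrease) triple for one sweep."""
    if Regu < lower:
        increase, decrease = True, False
    if Regu > upper:
        increase, decrease = False, True
    if increase:
        Regu = Regu * fact
    elif decrease:
        Regu = Regu / fact
    return Regu, increase, decrease
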



if __name__ == "__main__":

    parser = argparse.ArgumentParser()
    arg_defs.add_pp_arguments(parser)
    arg_defs.add_lrdt_arguments(parser)
    arg_defs.add_sparse_arguments(parser)
    arg_defs.add_nls_arguments(parser)
    arg_defs.add_col_arguments(parser)
    args, _ = parser.parse_known_args()

    # Set up CSV logging
    csv_path = join(results_dir, arg_defs.get_file_prefix(args)+'.csv')
    is_new_log = not Path(csv_path).exists()
    csv_file = open(csv_path, 'a')
    csv_writer = csv.writer(
        csv_file, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)

    s = args.s
    order = args.order
    R = args.R
    r = args.r
    nls_tol = args.nls_tol
    grad_tol = args.grad_tol
    cg_tol = args.cg_tol
    switch_tol = args.switch_tol
    nls_iter = args.nls_iter
    als_iter = args.als_iter
def CP_ALS(tenpy,
           A,
           T,
           O,
           num_iter,
           csv_file=None,
           Regu=None,
           method='DT',
           args=None,
           res_calc_freq=1,
           tol=1e-05):

    from CPD.common_kernels import get_residual
    from CPD.standard_ALS import CP_DTALS_Optimizer, CP_PPALS_Optimizer

    flag_dt = True

    if csv_file is not None:
        csv_writer = csv.writer(csv_file,
                                delimiter=',',
                                quotechar='|',
                                quoting=csv.QUOTE_MINIMAL)

    if Regu is None:
        Regu = 0

    normT = tenpy.vecnorm(T)

    time_all = 0.
    if args is None:
        optimizer = CP_DTALS_Optimizer(tenpy, T, A)
    else:
        optimizer_list = {
            'DT': CP_DTALS_Optimizer(tenpy, T, A),
            'PP': CP_PPALS_Optimizer(tenpy, T, A, args),
        }
        optimizer = optimizer_list[method]

    fitness_old = 0
    for i in range(num_iter):

        if i % res_calc_freq == 0 or i == num_iter - 1 or not flag_dt:
            res = get_residual(tenpy, T, A)
            fitness = 1 - res / normT

            if tenpy.is_master_proc():
                print("[", i, "] Residual is", res, "fitness is: ", fitness)
                # write to csv file
                if csv_file is not None:
                    csv_writer.writerow([i, time_all, res, fitness, flag_dt])
                    csv_file.flush()

        if res < tol:
            print('Method converged in', i, 'iterations')
            break
        t0 = time.time()
        if method == 'PP':
            A, pp_restart = optimizer.step(Regu)
            flag_dt = not pp_restart
        else:
            A = optimizer.step(Regu)
        t1 = time.time()
        tenpy.printf("[", i, "] Sweep took", t1 - t0, "seconds")

        time_all += t1 - t0
        fitness_old = fitness

    tenpy.printf(method + " method took", time_all, "seconds overall")

    if args is not None and args.save_tensor:
        folderpath = join(results_dir, arg_defs.get_file_prefix(args))
        save_decomposition_results(T, A, tenpy, folderpath)

    return res
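
# Usage sketch (illustrative, not from the source): as with Tucker_ALS above, this
# assumes the numpy backend accepts plain ndarrays and that A is a list of CP factor
# matrices of shape (T.shape[n], R). With args=None the dimension-tree (DT) optimizer
# is selected; the helper name is illustrative.
def _example_cp_als_run():
    import numpy as np
    import backend.numpy_ext as tenpy

    shape, R = (30, 30, 30), 8
    T = np.random.rand(*shape)
    O = np.ones(shape)                         # placeholder; unused by the DT path
    A = [np.random.rand(s, R) for s in shape]  # initial factor matrices
    return CP_ALS(tenpy, A, T, O, num_iter=50, Regu=1e-6, method='DT', tol=1e-5)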