def generate_A(pars, step_factor=1.0, lorentzian=False, **kwargs):
    # A Lorentzian (real-time) step evolves in the imaginary direction;
    # otherwise step in the Euclidean (imaginary-time) direction.
    if lorentzian:
        step_dir = 1j
    else:
        step_dir = -1
    dbname = pars["database"]  # As in the mains below; dbname was undefined here.
    A = datadispenser.get_data(
        dbname, "A", pars=pars,
        complexion_step_direction=step_dir,
        iter_count=0,
        complexion_timestep=pars["complexion_timestep"]*step_factor,
        **kwargs
    )[0]
    return A
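# A minimal usage sketch (not from the original source): how generate_A
# might be called for Euclidean vs. Lorentzian evolution. It assumes the
# module-level parse, apply_default_pars, and parinfo used elsewhere in
# this file, and that the defaults supply the keys generate_A reads.
def _generate_A_usage_sketch():
    pars = parse()
    apply_default_pars(pars, parinfo)
    A_eucl = generate_A(pars)                   # step_dir = -1 (Euclidean)
    A_lore = generate_A(pars, lorentzian=True)  # step_dir = 1j (Lorentzian)
    A_half = generate_A(pars, step_factor=0.5)  # halved timestep
    return A_eucl, A_lore, A_half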
def main():
    pars = parse()
    apply_default_pars(pars, parinfo)
    datadispenser.update_default_pars("timeevolved_insertion_mcmps", pars,
                                      algorithm="MPS", t=3)
    dbname = pars["database"]
    if pars["debug"]:
        warnings.filterwarnings('error')

    datetime_str = datetime.datetime.strftime(datetime.datetime.now(),
                                              '%Y-%m-%d_%H-%M-%S')
    title_str = '{}_{}'.format(filename, datetime_str)
    logfilename = "logs/{}.log".format(title_str)
    rootlogger = logging.getLogger()
    set_filehandler(rootlogger, logfilename, pars)

    # - Infoprint -
    infostr = "\n{}\n".format("="*70)
    infostr += "Running {} with the following parameters:".format(filename)
    for k, v in sorted(pars.items()):
        infostr += "\n%s = %s" % (k, v)
    logging.info(infostr)

    # Find the range of times for evaluating the fidelities.
    max_t_step = int(np.ceil(pars["max_t"]/pars["lorentzian_timestep"]))
    t_steps = list(range(max_t_step+1))
    ts = [pars["lorentzian_timestep"]*t_step for t_step in t_steps]

    fid_t_file = datadir + "/fid_t_latest.npy"
    llimit = pars["llimit"]
    rlimit = pars["rlimit"]
    dist = rlimit - llimit
    if os.path.exists(fid_t_file):
        os.remove(fid_t_file)
    fid_t = np.empty((dist, 3, 0), dtype=np.complex_)

    for t, t_step in zip(ts, t_steps):
        t = np.around(t, 10)
        logging.info("\nt: {}".format(t))
        mcmps = datadispenser.get_data(
            dbname, "timeevolved_insertion_mcmps", pars, t=t, algorithm="MPS"
        )
        N = mcmps.length()
        eval_point = 0
        w = mcmps.weights(eval_point)
        ent = entanglement_entropy(w)
        logging.info("Length: {}".format(N))
        logging.info("Norm factors: {:.9e} & {:.9e}"
                     .format(mcmps.normfactor, mcmps.umps.normfactor))
        logging.info("Entropy at {}: {}".format(eval_point, ent))

        umps = datadispenser.get_data(
            dbname, "umps_groundstate", pars, algorithm="MPS"
        )
        conj_mps = type(mcmps)(umps)
        # Changing the identifiers here is an ad hoc workaround that
        # shouldn't be necessary.
        conj_mps.change_identifier()
        mcmps.change_identifier()

        # temp1 holds two fidelity values per cut; together with the
        # window fidelity in temp2 that makes three columns per position.
        temp1 = np.array([mcmps.halfsystem_fidelity(conj_mps, i+1/2,
                                                    normalize=False)
                          for i in range(llimit, rlimit)])
        temp2 = np.array([mcmps.window_fidelity(conj_mps, i, i+1,
                                                normalize=False)
                          for i in range(llimit, rlimit)])
        temp2 = np.reshape(temp2, (temp2.shape[0], 1))
        temp = np.concatenate((temp1, temp2), axis=1)
        temp = np.reshape(temp, (temp.shape[0], 3, 1))
        fid_t = np.concatenate((fid_t, temp), axis=2)
        np.save(fid_t_file, fid_t)
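# entanglement_entropy above is defined elsewhere in this codebase. A
# minimal sketch of what it could compute, assuming w holds the Schmidt
# weights (singular values) at the cut and that the von Neumann entropy
# S = -sum_i p_i log(p_i) is taken over p_i = w_i**2, normalized to one:
def entanglement_entropy_sketch(w):
    p = np.abs(np.asarray(w))**2  # assumes w converts to an ndarray
    p = p / np.sum(p)
    p = p[p > 0]  # drop exact zeros to avoid log(0)
    return -np.sum(p * np.log(p))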
def main():
    pars = parse()
    apply_default_pars(pars, parinfo)
    datadispenser.update_default_pars("umps_groundstate", pars,
                                      algorithm="MPS")
    dbname = pars["database"]
    if pars["debug"]:
        warnings.filterwarnings('error')

    datetime_str = datetime.datetime.strftime(datetime.datetime.now(),
                                              '%Y-%m-%d_%H-%M-%S')
    title_str = '{}_{}'.format(filename, datetime_str)
    logfilename = "logs/{}.log".format(title_str)
    rootlogger = logging.getLogger()
    set_filehandler(rootlogger, logfilename, pars)

    # - Infoprint -
    infostr = "\n{}\n".format("=" * 70)
    infostr += "Running {} with the following parameters:".format(filename)
    for k, v in sorted(pars.items()):
        infostr += "\n%s = %s" % (k, v)
    logging.info(infostr)

    chi1 = pars["chi1"]
    chi2 = pars["chi2"]
    h1 = pars["h1"]
    h2 = pars["h2"]
    L = pars["L"]
    do_separate = pars["do_separate"]
    do_exact = pars["do_exact"]

    umps = datadispenser.get_data(dbname, "umps_groundstate", pars,
                                  algorithm="MPS", mps_chis=range(1, chi1),
                                  h_trans=h1)
    mcmps1 = McMPS(umps, tensors=[umps.tensor.copy()], weightss=[])
    logging.info("Correlation length 1: {}".format(
        mcmps1.umps.correlation_length()))

    umps = datadispenser.get_data(dbname, "umps_groundstate", pars,
                                  algorithm="MPS", mps_chis=range(1, chi2),
                                  h_trans=h2)
    mcmps2 = McMPS(umps, tensors=[umps.tensor.copy()], weightss=[])
    logging.info("Correlation length 2: {}".format(
        mcmps2.umps.correlation_length()))

    if do_separate:
        fids_separate = []
        ul, ur = None, None
        for i in range(L):
            # Reuse the environment tensors (ul, ur) from the previous
            # window as the starting point for the next one.
            fid, ul, ur = mcmps1.window_fidelity_separate(
                mcmps2, 0, i, return_us=True, initial_us=(ul, ur)
            )
            fids_separate.append(fid)
            logging.info("{}, separate: {}".format(i, fid))
        fids_separate = np.array(fids_separate)
        with open("{}/fids_sep_latest_{}_{}_{}_{}_{}.p".format(
                datadir, chi1, chi2, h1, h2, L), "wb") as f:
            pickle.dump(fids_separate, f)

    if do_exact:
        fid0 = mcmps1.window_fidelity(mcmps2, 0, 0, log=True)
        fids_exact = mcmps1.window_fidelity(mcmps2, 0, L - 1, upto=True,
                                            log=True)
        fids_exact = np.concatenate(([fid0], fids_exact))
        with open("{}/fids_exact_latest_{}_{}_{}_{}_{}.p".format(
                datadir, chi1, chi2, h1, h2, L), "wb") as f:
            pickle.dump(fids_exact, f)
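# A hypothetical loader (not in the original source) for inspecting the
# pickles written above; the file-name pattern mirrors the dumps, with
# kind being either "sep" or "exact":
def load_fids_sketch(chi1, chi2, h1, h2, L, kind="sep"):
    path = "{}/fids_{}_latest_{}_{}_{}_{}_{}.p".format(
        datadir, kind, chi1, chi2, h1, h2, L)
    with open(path, "rb") as f:
        return pickle.load(f)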
logfilename = "logs/{}.log".format(title_str) rootlogger = logging.getLogger() set_filehandler(rootlogger, logfilename, pars) # - Infoprint - infostr = "\n{}\n".format("=" * 70) infostr += "Running {} with the following parameters:".format(filename) for k, v in sorted(pars.items()): infostr += "\n%s = %s" % (k, v) logging.info(infostr) dbname = pars["database"] for it in pars["iters"]: logging.info("\nIteration {}".format(it)) res = datadispenser.get_data(dbname, "A", pars, iter_count=it) A, log_fact = res[0], res[1] if pars["print_spectra"]: es = get_A_spectrum(A) msg = "Spectrum of A:\n{}".format(es[:30]) logging.info(msg) if pars["print_free_energy"]: f = get_free_energy(A, log_fact, pars, it) exact_f = modeldata.get_free_energy(pars) f_error = np.abs(f - exact_f) / exact_f msg = ("Free energy per site: {} ({}, off by {:.4e})".format( f, exact_f, f_error)) logging.info(msg)
def main():
    pars = parse()
    apply_default_pars(pars, parinfo)
    datadispenser.update_default_pars("timeevolved_insertion_mcmps", pars,
                                      algorithm="MPS", t=5)
    datadispenser.update_default_pars(
        "A", pars, iter_count=0, complexion_step_direction=-1,
        algorithm="TNR", model="complexion_" + pars["model"],
        complexion_spacestep=1, complexion_timestep=1
    )
    dbname = pars["database"]
    if pars["debug"]:
        warnings.filterwarnings('error')

    datetime_str = datetime.datetime.strftime(datetime.datetime.now(),
                                              '%Y-%m-%d_%H-%M-%S')
    title_str = '{}_{}'.format(filename, datetime_str)
    logfilename = "logs/{}.log".format(title_str)
    rootlogger = logging.getLogger()
    set_filehandler(rootlogger, logfilename, pars)

    # - Infoprint -
    infostr = "\n{}\n".format("=" * 70)
    infostr += "Running {} with the following parameters:".format(filename)
    for k, v in sorted(pars.items()):
        infostr += "\n%s = %s" % (k, v)
    logging.info(infostr)

    # Find the range of times and positions for evaluating the
    # expectation values.
    poses = list(range(-pars["max_x"], pars["max_x"] + 1))
    max_t_step = int(np.ceil(pars["max_t"] / pars["lorentzian_timestep"]))
    t_steps = list(range(max_t_step + 1))
    ts = [pars["lorentzian_timestep"] * t_step for t_step in t_steps]

    mcmps = datadispenser.get_data(dbname, "timeevolved_insertion_mcmps",
                                   pars, t=0, algorithm="MPS")
    # Record whether the MPS uses plain or symmetry-preserving tensors.
    pars["symmetry_tensors"] = mcmps.tensortype() != Tensor
    op_late = get_operator_insertion(pars["insertion_late"])
    op_late = mcmps.tensortype().from_ndarray(op_late)

    # Evaluate correlators for various times.
    data_filename = None
    expectations = None
    for t, t_step in zip(ts, t_steps):
        t = np.around(t, 10)
        logging.info("\nt: {}".format(t))
        mcmps = datadispenser.get_data(dbname, "timeevolved_insertion_mcmps",
                                       pars, t=t, algorithm="MPS")
        N = mcmps.length()
        eval_point = 0
        w = mcmps.weights(eval_point)
        ent = entanglement_entropy(w)
        logging.info("Length: {}".format(N))
        logging.info("Norm factors: {:.9e} & {:.9e}".format(
            mcmps.normfactor, mcmps.umps.normfactor))
        logging.info("Entropy at {}: {}".format(eval_point, ent))
        logging.info("Spectrum at {}:".format(eval_point))
        logging.info(w)

        old_data_filename = data_filename
        expectations, data_filename = update_expectations(
            mcmps, op_late, t, expectations, poses, pars
        )
        # Keep only the latest data file on disk.
        if old_data_filename is not None:
            os.remove(old_data_filename)
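# get_operator_insertion above resolves an operator name to a matrix; the
# actual mapping lives elsewhere in this codebase. A minimal sketch,
# assuming the insertions are named single-site Pauli operators:
def get_operator_insertion_sketch(name):
    paulis = {
        "sigma_x": np.array([[0., 1.], [1., 0.]]),
        "sigma_y": np.array([[0., -1j], [1j, 0.]]),
        "sigma_z": np.array([[1., 0.], [0., -1.]]),
    }
    return paulis[name]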