def set_fit_props(name, points, config, remove):
    """Add simulation points to a fit, or interactively remove sims from it."""
    from os import chdir
    from os.path import basename, dirname
    import json
    from lib.utils import (fit_dir, config_dir, point_dir, dir_point,
                           authorization_request)

    if remove:
        if points:
            print('Warning: points specification not compatible with --remove '
                  'option.')
            return
        elif config != 'test':
            print('Warning: config specification not compatible with --remove '
                  'option.')
            return

    chdir(fit_dir(name))
    try:
        with open('sims.json', 'r') as file:
            sims = json.load(file)
    except FileNotFoundError:
        sims = []

    # SIMS UPDATE
    if not remove:
        c_dir = config_dir(config)
        for Point in points:
            p_dir = c_dir + '/' + point_dir(Point)
            if p_dir not in sims:
                sims += [p_dir]

        with open('sims.json', 'w') as file:
            json.dump(sims, file, indent=4)
    # SIMS REMOVAL
    else:
        new_sims = sims.copy()
        for sim in sims:
            Point = dir_point(basename(sim))
            config = basename(dirname(sim))
            what = f"to remove sim from fit '{name}'"
            extra = f"\033[38;5;80m config: '{config}'\033[0m"
            auth = authorization_request(Point=Point, what_to_do=what,
                                         extra_message=extra)
            if auth == 'quit':
                print('Nothing done for last sim.')
                return
            elif auth == 'yes':
                new_sims.remove(sim)
                with open('sims.json', 'w') as file:
                    json.dump(new_sims, file, indent=4)
                print('Sim removed')
            else:
                print('Nothing removed.')

def sim_obs(points, config, plot, fit, exclude_torelons, exclude_bootstrap,
            fit_name, force):
    """Compute observables for the selected points, serially or in parallel."""
    from os.path import basename, dirname, realpath
    import json
    from pprint import pprint
    from lib.utils import config_dir, dir_point, fit_dir

    if fit_name:
        f_dir = fit_dir(fit_name)
        try:
            with open(f_dir + '/sims.json', 'r') as file:
                sims = json.load(file)
        except FileNotFoundError:
            print('No simulations assigned to this fit yet.')
            sims = []  # nothing to iterate over

        points = []
        points_configs = {}
        for s in sims:
            if s[-1] == '/':
                s = s[:-1]
            Point = dir_point(basename(s))
            points += [Point]
            points_configs = {**points_configs, Point: realpath(dirname(s))}
        c_dir = None
    else:
        points_configs = None
        c_dir = config_dir(config)

    col = 216  # ANSI 256-color code used for highlighting
    print(f'Number of selected points: \033[38;5;{col}m{len(points)}\033[0m')
    print(f'\033[38;5;{col}m', end='')
    pprint(points)
    print('\033[0m')

    if not force:
        i = 0
        for Point in points:
            args = (Point, points_configs, c_dir, i, force, plot, fit,
                    exclude_torelons, exclude_bootstrap)
            ret = sim_obs_compute(args)
            if ret == 'return':
                return
            elif ret == 'continue':
                continue
            i += 1
    else:
        import multiprocessing as mp

        i = 0
        args = []
        for Point in points:
            args += [(Point, points_configs, c_dir, i, force, plot, fit,
                      exclude_torelons, exclude_bootstrap)]
            i += 1

        with mp.Pool(mp.cpu_count() - 1) as pool:
            pool.map(sim_obs_compute, args)

def mean_volumes(configs=None, print_flag=True, path=None):
    """Return the mean thermalized volume of every sim, grouped by config."""
    from os import chdir
    from os.path import basename, isfile
    import json
    from pprint import pprint
    from numpy import loadtxt
    from lib.utils import dir_point

    mvs = {}
    for c, sims in sim_paths().items():
        if configs and c not in configs:
            continue
        for path in sims:
            chdir(path)
            Point = dir_point(basename(path))
            if isfile('history/volumes.txt'):
                _, volumes = loadtxt('history/volumes.txt', unpack=True)
                with open('state.json', 'r') as state_file:
                    state = json.load(state_file)
                try:
                    cut = state['cut']
                except KeyError:
                    cut = 0
                try:
                    mvs[c] = {**mvs[c], Point: volumes[cut:].mean()}
                except KeyError:
                    mvs[c] = {Point: volumes[cut:].mean()}

    if print_flag:
        pprint(mvs)
    return mvs

def set_cut(p_dir, i=0, force=False):
    """Choose the thermalization cut for a sim, interactively or automatically."""
    from os import chdir
    from os.path import basename
    from math import ceil
    from pandas import read_csv
    from lib.utils import dir_point

    chdir(p_dir)
    Point = dir_point(basename(p_dir))

    vol_file = 'history/volumes.txt'
    indices, volumes = read_csv(vol_file, sep=' ').values[:, :2].transpose()

    if force:
        # automatic choice: discard the first 20% of the history
        index = indices[-1] * 0.2
    else:
        index, _ = select_from_plot(Point, indices, volumes, i)

    if index:
        return ceil(index)
    else:
        return index

def divergent_points(configs=None, conf_plot=False, save_path=None,
                     load_path=None):
    """Plot convergent vs divergent points, optionally saving or reloading them."""
    from os.path import isdir
    from numpy import array
    from matplotlib.pyplot import show, figure

    if load_path and isdir(load_path):
        from os import chdir
        from numpy import loadtxt

        chdir(load_path)
        try:
            convergent = loadtxt('convergent.csv')
            divergent = loadtxt('divergent.csv')
            if len(convergent.shape) == 1:
                convergent = convergent.reshape(1, convergent.shape[0])
            if len(divergent.shape) == 1:
                divergent = divergent.reshape(1, divergent.shape[0])
        except OSError:
            print(f"Invalid path given: '{load_path}'.")
            return
        try:
            fit_line = loadtxt('fit_line.csv')
        except OSError:
            fit_line = []
    else:
        convergent = []
        divergent = []
        fit_line = []

        if conf_plot:
            ylabel = []
            i = 0

        for c, sims in sim_paths().items():
            if configs and c not in configs:
                continue
            if conf_plot:
                i += 1
                print(i, c)
                ylabel += [c]
            for path in sims:
                from os import chdir
                from os.path import basename, isfile
                from lib.utils import dir_point

                chdir(path)
                if conf_plot:
                    # in the config plot the y coordinate is the config index
                    Point = [dir_point(basename(path))[0], i]
                else:
                    Point = dir_point(basename(path))
                if isfile('max_volume_reached'):
                    divergent += [Point]
                elif isfile('history/volumes.txt'):
                    convergent += [Point]

    fig = figure()
    ax = fig.add_subplot(111)

    if len(convergent) > 0:
        convergent = array(convergent)
        ax.plot(convergent[:, 0], convergent[:, 1], 'b+')
    if len(divergent) > 0:
        divergent = array(divergent)
        ax.plot(divergent[:, 0], divergent[:, 1], 'r+')
    if len(fit_line) > 0:
        ax.plot(fit_line[:, 0], fit_line[:, 1], 'k')

    if save_path and isdir(save_path):
        from os import makedirs, chdir
        from numpy import savetxt

        chdir(save_path)
        makedirs('DivergentPlot', exist_ok=True)
        chdir('DivergentPlot')
        savetxt('convergent.csv', array(convergent))
        savetxt('divergent.csv', array(divergent))
        savetxt('fit_line.csv', array(fit_line))
    elif save_path:
        print(f"Invalid path given: '{save_path}'.")

    if conf_plot:
        from numpy import linspace

        yticks_location = linspace(1, len(ylabel), len(ylabel))
        ax.set_yticks(yticks_location)
        ax.set_yticklabels(ylabel)
        ax.set_ylim(0, len(ylabel) + 1)

    if save_path and isdir(save_path):
        from matplotlib.pyplot import savefig
        savefig(save_path + '/DivergentPlot/DivergentPlot.pdf')
    elif load_path and isdir(load_path):
        from matplotlib.pyplot import savefig
        savefig(load_path + '/DivergentPlot.pdf')

    show()

def export_data(name, unpack):
    """Collect a fit's measures into 'data.json', or unpack them to CSV files."""
    from os import chdir
    from os.path import basename, dirname, isfile
    import json
    import numpy as np
    from lib.utils import fit_dir, dir_point

    fit_d = fit_dir(name)
    chdir(fit_d)

    if not unpack:
        try:
            with open('sims.json', 'r') as file:
                sims = json.load(file)
        except FileNotFoundError:
            print('No simulation assigned to this fit.')
            return

        data = []
        for s in sims:
            if s[-1] == '/':
                s = s[:-1]
            config = basename(dirname(s))
            Point = dir_point(basename(s))
            point_data = {}

            if isfile(s + '/max_volume_reached'):
                print(f'\033[38;5;41m{Point}\033[0m not included in fit, '
                      'because '
                      '\033[38;5;80mmax_volume_reached\033[0m is present.')
                print(f"\033[38;5;80m config: '{config}'\033[0m")
                continue

            try:
                with open(s + '/measures.json', 'r') as file:
                    measures = json.load(file)
            except FileNotFoundError:
                print(f'\033[38;5;41m{Point}\033[0m no measure file present.')
                print(f"\033[38;5;80m config: '{config}'\033[0m")
                continue

            point_data['lambda'] = Point[0]
            point_data['beta'] = Point[1]
            point_data['config'] = config
            point_data.update(measures.copy())
            for prop in ['cut', 'block', 'time']:
                try:
                    del point_data[prop]
                except KeyError:
                    pass

            data += [point_data]
            print(f'\033[38;5;41m{Point}\033[0m collected.')

        with open('data.json', 'w') as file:
            json.dump(data, file, indent=4)

    elif unpack in ['v', 'volumes']:
        try:
            with open('data.json', 'r') as file:
                data = json.load(file)
        except FileNotFoundError:
            print("No data file (\033[38;5;80m'data.json'\033[0m) to unpack.")
            return

        vol_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                volume = point_data['volume']
            except KeyError:
                continue
            vol_data += [[Point[0], Point[1], volume[0], volume[1], config]]

        with open('volumes.csv', 'w') as file:
            sep = ' '
            end = '\n'
            file.write('# Lambda Beta Volume Error Config' + end)
            vol_data = sorted(vol_data)
            for point_vol in vol_data:
                str_point_vol = []
                for x in point_vol:
                    str_point_vol += [str(x)]
                file.write(sep.join(str_point_vol) + end)

        print(f"\033[38;5;41m({name})\033[0m volumes from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              "\033[38;5;80m'volumes.csv'\033[0m")

    elif unpack in ['g', 'gauge-action']:
        try:
            with open('data.json', 'r') as file:
                data = json.load(file)
        except FileNotFoundError:
            print("No data file (\033[38;5;80m'data.json'\033[0m) to unpack.")
            return

        g_action_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                g_action = point_data['action']
                g_action_density = point_data['action-density']
            except KeyError:
                continue
            g_action_data += [[Point[0], Point[1], g_action[0], g_action[1],
                               g_action_density[0], g_action_density[1],
                               config]]

        with open('gauge_action.csv', 'w') as file:
            sep = ' '
            end = '\n'
            file.write('# Lambda Beta Action Error ActionDensity Error Config'
                       + end)
            g_action_data = sorted(g_action_data)
            for point_g_action in g_action_data:
                str_point_g_action = []
                for x in point_g_action:
                    str_point_g_action += [str(x)]
                file.write(sep.join(str_point_g_action) + end)

        print(f"\033[38;5;41m({name})\033[0m gauge actions from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              "\033[38;5;80m'gauge_action.csv'\033[0m")

    elif unpack in ['p', 'profiles']:
        try:
            with open('data.json', 'r') as file:
                data = json.load(file)
        except FileNotFoundError:
            print("No data file (\033[38;5;80m'data.json'\033[0m) to unpack.")
            return

        profile_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                profile, errors = point_data['profiles_corr']
            except KeyError:
                continue
            profile_data += [[Point[0], Point[1], config, *profile, *errors]]

        with open('profiles.csv', 'w') as file:
            sep = ' '
            end = '\n'
            file.write('# Lambda[0] Beta[1] Config[2] Profile[3:3+t] ' +
                       'Errors[3+t:3+2t]' + end)
            profile_data = sorted(profile_data)
            for point_profile in profile_data:
                str_point_profile = []
                for x in point_profile:
                    str_point_profile += [str(x)]
                file.write(sep.join(str_point_profile) + end)

        print(f"\033[38;5;41m({name})\033[0m profiles from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              "\033[38;5;80m'profiles.csv'\033[0m")

    elif unpack in ['pf', 'profiles-fit', 'pf2', 'profiles-fit2']:
        try:
            with open('data.json', 'r') as file:
                data = json.load(file)
        except FileNotFoundError:
            print("No data file (\033[38;5;80m'data.json'\033[0m) to unpack.")
            return

        profile_fit_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                if unpack[-1] != '2':
                    fit_data = point_data['profiles_corr_fit']
                else:
                    fit_data = point_data['profiles-corr-fit2']
                len_corr = fit_data['par'][0]
                err = np.sqrt(fit_data['cov'][0][0])
            except KeyError:
                continue
            profile_fit_data += [[Point[0], Point[1], len_corr, err, config]]

        if unpack[-1] != '2':
            file_name = 'profiles_length.csv'
        else:
            file_name = 'profiles_length2.csv'

        with open(file_name, 'w') as file:
            sep = ' '
            end = '\n'
            file.write('# Lambda Beta Corr_Length Error Config' + end)
            profile_fit_data = sorted(profile_fit_data)
            for point_profile in profile_fit_data:
                str_point_profile = []
                for x in point_profile:
                    str_point_profile += [str(x)]
                file.write(sep.join(str_point_profile) + end)

        print(f"\033[38;5;41m({name})\033[0m profiles fit from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              f"\033[38;5;80m'{file_name}'\033[0m")

    elif unpack in ['top', 'susc', 'top-susc']:
        try:
            with open('data.json', 'r') as file:
                data = json.load(file)
        except FileNotFoundError:
            print("No data file (\033[38;5;80m'data.json'\033[0m) to unpack.")
            return

        susc_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                susc = point_data['top-susc']
            except KeyError:
                continue
            susc_data += [[Point[0], Point[1], susc[0], susc[1], config]]

        with open('top_susc.csv', 'w') as file:
            sep = ' '
            end = '\n'
            file.write('# Lambda Beta Top-Susc Error Config' + end)
            susc_data = sorted(susc_data)
            for point_susc in susc_data:
                str_point_susc = []
                for x in point_susc:
                    str_point_susc += [str(x)]
                file.write(sep.join(str_point_susc) + end)

        print(f"\033[38;5;41m({name})\033[0m topological susceptibilities from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              "\033[38;5;80m'top_susc.csv'\033[0m")

    elif unpack in ['t', 'torelons']:
        try:
            with open('data.json', 'r') as file:
                data = json.load(file)
        except FileNotFoundError:
            print("No data file (\033[38;5;80m'data.json'\033[0m) to unpack.")
            return

        torelon_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                torelon, errors = point_data['torelon-decay']
            except KeyError:
                continue
            torelon_data += [[Point[0], Point[1], config, *torelon, *errors]]

        with open('torelons.csv', 'w') as file:
            sep = ' '
            end = '\n'
            file.write('# Lambda[0] Beta[1] Config[2] Torelon[3:3+t] ' +
                       'Errors[3+t:3+2t]' + end)
            torelon_data = sorted(torelon_data)
            for point_torelon in torelon_data:
                str_point_torelon = []
                for x in point_torelon:
                    str_point_torelon += [str(x)]
                file.write(sep.join(str_point_torelon) + end)

        print(f"\033[38;5;41m({name})\033[0m torelons from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              "\033[38;5;80m'torelons.csv'\033[0m")

    elif unpack in ['tf', 'torelons-fit', 'tf2', 'torelons-fit2']:
        try:
            with open('data.json', 'r') as file:
                data = json.load(file)
        except FileNotFoundError:
            print("No data file (\033[38;5;80m'data.json'\033[0m) to unpack.")
            return

        profile_fit_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                if unpack[-1] != '2':
                    fit_data = point_data['torelon-decay-fit']
                else:
                    fit_data = point_data['torelon-decay-fit2']
                len_corr = fit_data['par'][0]
                err = np.sqrt(fit_data['cov'][0][0])
            except KeyError:
                continue
            profile_fit_data += [[Point[0], Point[1], len_corr, err, config]]

        if unpack[-1] != '2':
            file_name = 'torelon_length.csv'
        else:
            file_name = 'torelon_length2.csv'

        with open(file_name, 'w') as file:
            sep = ' '
            end = '\n'
            file.write('# Lambda Beta Corr_Length Error Config' + end)
            profile_fit_data = sorted(profile_fit_data)
            for point_profile in profile_fit_data:
                str_point_profile = []
                for x in point_profile:
                    str_point_profile += [str(x)]
                file.write(sep.join(str_point_profile) + end)

        print(f"\033[38;5;41m({name})\033[0m torelons fit from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              f"\033[38;5;80m'{file_name}'\033[0m")

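# Typical usage sketch for export_data (assumption: it is invoked by the
# project's CLI/driver code; the fit name 'my_fit' below is hypothetical):
#
#     export_data('my_fit', unpack=None)       # collect measures into data.json
#     export_data('my_fit', unpack='volumes')  # then unpack volumes to volumes.csv
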
def info_fit(name, kind='sims'):
    """Print, per config, the points assigned to a fit (with obs flags if kind='obs')."""
    from os import chdir
    from os.path import basename, dirname
    from pprint import pprint
    import json
    from lib.utils import fit_dir, dir_point

    if kind in ['s', 'sims', None]:
        kind = 'sims'
    elif kind in ['o', 'obs']:
        kind = 'obs'

    chdir(fit_dir(name))
    try:
        with open('sims.json', 'r') as file:
            sims = json.load(file)
    except FileNotFoundError:
        print('No simulations assigned to this fit yet.')
        return

    d = {}
    for s in sims:
        if s[-1] == '/':
            s = s[:-1]
        if kind == 'sims':
            config = basename(dirname(s))
            Point = dir_point(basename(s))
            try:
                d[config] += [Point]
            except KeyError:
                d[config] = [Point]
        elif kind == 'obs':
            try:
                with open(s + '/measures.json', 'r') as file:
                    measures = json.load(file)
            except FileNotFoundError:
                measures = {}

            # flags: C = cut set, B = block set, V = volume measured
            flags = ''
            if 'cut' in measures.keys():
                flags += 'C'
            if 'block' in measures.keys():
                flags += 'B'
            if 'volume' in measures.keys():
                flags += 'V'

            config = basename(dirname(s))
            Point = dir_point(basename(s))
            try:
                d[config] += [[Point, flags]]
            except KeyError:
                d[config] = [[Point, flags]]

    for k in d.keys():
        d[k] = sorted(d[k])

    if kind in ['sims', 'obs']:
        pprint(d)
    else:
        raise ValueError(f'info-fit: kind {kind} not recognized')

def fit_divergence(name, kind='volumes', reload=False):
    """Fit the divergence of an observable over the points assigned to a fit."""
    from os import chdir
    from os.path import basename, dirname, isfile
    from datetime import datetime
    import json
    from numpy import genfromtxt
    from lib.utils import fit_dir, dir_point
    from lib.analysis.fit import fit_divergence

    if kind in ['v', 'volumes']:
        kind = 'volumes'
        kind_file = 'volumes'
    elif kind in ['p', 'profiles']:
        kind = 'profiles'
        kind_file = 'profiles_length'
    elif kind in ['t', 'torelons']:
        kind = 'torelons'
        kind_file = 'torelons_length'
    elif kind in ['g', 'gauge-action']:
        kind = 'gauge-action'
        kind_file = 'gauge_action'
    elif kind in ['top', 'susc', 'top-susc']:
        kind = 'topological-susceptibility'
        kind_file = 'top_susc'
    else:
        raise ValueError(f'{kind} not available for divergence fit.')

    fit_d = fit_dir(name)
    chdir(fit_d)

    try:
        with open('sims.json', 'r') as file:
            sims = json.load(file)
    except FileNotFoundError:
        print('No simulations assigned to this fit yet.')
        # do not return: if '<kind_file>.csv' is already present it can be used

    if not isfile(f'{kind_file}.csv') or reload:
        d = {}
        lambdas = []
        betas = []
        means = []
        errors = []
        for s in sims:
            if s[-1] == '/':
                s = s[:-1]
            config = basename(dirname(s))
            Point = dir_point(basename(s))

            if isfile(s + '/max_volume_reached'):
                print(f'\033[38;5;41m{Point}\033[0m not included in fit, '
                      'because '
                      '\033[38;5;80mmax_volume_reached\033[0m is present.')
                print(f"\033[38;5;80m config: '{config}'\033[0m")
                continue

            try:
                with open(s + '/measures.json', 'r') as file:
                    measures = json.load(file)
            except FileNotFoundError:
                measures = {}
            with open(s + '/state.json', 'r') as file:
                state = json.load(file)

            if 'time' in measures.keys():
                s_time = datetime.strptime(state['end_time'],
                                           '%d-%m-%Y %H:%M:%S')
                m_time = datetime.strptime(measures['time'],
                                           '%d-%m-%Y %H:%M:%S')
            else:
                print(f'Missing time in {Point}, in config: {config}.')
                return

            if s_time > m_time:
                print('\033[38;5;203mWarning:\033[0m in Point '
                      f'\033[38;5;41m{Point}\033[0m in '
                      f"\033[38;5;80mconfig: '{config}'\033[0m measures are "
                      '\033[38;5;210mnot up to date\033[0m '
                      "with last simulation's data")
                print()

            d[Point] = {'config': config, **measures,
                        'time_sim_end': state['end_time']}

            k_key = kind[:-1]  # observable key, e.g. 'volumes' -> 'volume'
            if k_key in measures.keys():
                lambdas += [Point[0]]
                betas += [Point[1]]
                means += [measures[k_key][0]]
                errors += [measures[k_key][1]]
            else:
                print(f"Missing {k_key} in {Point}, in config: {config}.")
                return

        with open(f'{kind_file}.csv', 'w') as file:
            file.write('# Lambda Beta Volume Error Config\n')
            for Point, attr in d.items():
                mean, err = attr[kind[:-1]]
                data = [Point[0], Point[1], mean, err, attr['config']]
                line = ' '.join([str(x) for x in data])
                file.write(line + '\n')
    else:
        data = genfromtxt(f'{kind_file}.csv', unpack=True)
        lambdas, betas = data[:2]
        if kind == 'gauge-action':
            means, errors = data[4:6]
        else:
            means, errors = data[2:4]

    fit_divergence(lambdas, means, errors, betas, kind=kind)

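# The actual divergence fit is delegated to lib.analysis.fit.fit_divergence,
# which is not part of this file. The sketch below only illustrates one
# possible approach (a power-law divergence fitted with scipy); the model, its
# parametrization, and the helper name are assumptions, not the project's
# implementation.
def _divergence_fit_sketch(lambdas, means, errors):
    """Hypothetical fit of m(l) = a * (l - l_c)**(-alpha) near a critical point."""
    import numpy as np
    from scipy.optimize import curve_fit

    def model(lam, a, lam_c, alpha):
        # the clip keeps the base positive while the optimizer explores lam_c
        return a * np.clip(lam - lam_c, 1e-12, None) ** (-alpha)

    lambdas = np.asarray(lambdas, dtype=float)
    means = np.asarray(means, dtype=float)
    errors = np.asarray(errors, dtype=float)

    # crude initial guess: critical coupling slightly below the sampled range
    p0 = (means.min(), lambdas.min() - 0.1, 1.0)
    par, cov = curve_fit(model, lambdas, means, p0=p0, sigma=errors,
                         absolute_sigma=True)
    return par, cov
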
def set_block(p_dir, i=0, force=False):
    """Choose the blocking size for a sim, interactively or automatically."""
    from os import chdir
    from os.path import basename
    from math import log
    import json
    import numpy as np
    from pandas import read_csv
    from lib.utils import dir_point

    chdir(p_dir)
    Point = dir_point(basename(p_dir))

    with open('measures.json', 'r') as file:
        measures = json.load(file)
    cut = measures['cut']
    if not cut:
        print("No 'cut' found, so it's not possible to go on with the "
              "'block'.")
        return None

    vol_file = 'history/volumes.txt'
    indices, volumes = read_csv(vol_file, sep=' ').values[:, :2].transpose()
    imax = indices[-1]
    volumes_cut = volumes[indices > cut]
    indices_cut = indices[indices > cut]

    if volumes_cut.std() < 1e-5:
        # use the gauge action as a pseudo-volume if the volume is fixed
        vol_file = 'history/gauge.txt'
        indices, volumes = read_csv(vol_file,
                                    sep=' ').values[:, :2].transpose()
        imax = indices[-1]
        volumes_cut = volumes[indices > cut]
        indices_cut = indices[indices > cut]

    ratio = 1.3
    block_sizes = [ratio**k
                   for k in range(15, int(log(imax - cut, ratio)) - 12)]

    stdevs = []
    for bs in block_sizes:
        _, stdev = blocked_mean_std(indices_cut, volumes_cut, bs)
        stdevs += [stdev]

    if force:
        # automatic choice: the block size that maximizes the blocked stdev
        block = block_sizes[np.array(stdevs).argmax()]
    else:
        block, _ = select_from_plot(Point, block_sizes, stdevs, i,
                                    proj_axis='xy', block=True)

    if block is None or block == -1:
        print('Nothing done.')
        return None

    return int(block)

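# set_block relies on blocked_mean_std(), which lives elsewhere in this module
# and is not shown here. The helper below is a minimal sketch of the blocking
# (binning) technique it presumably implements: average the series inside
# blocks of a given size, then take the standard error of the block means.
# The name, signature, and the assumption of evenly spaced measurements are
# all hypothetical, not the project's actual implementation.
def _blocked_mean_std_sketch(values, block_size):
    """Illustrative blocking estimate: (mean, standard error of block means)."""
    import numpy as np

    values = np.asarray(values, dtype=float)
    bs = max(int(block_size), 1)
    n_blocks = len(values) // bs
    if n_blocks < 2:
        # not enough blocks to estimate a spread
        return values.mean(), float('nan')

    # drop the tail that does not fill a whole block, then average block-wise
    block_means = values[:n_blocks * bs].reshape(n_blocks, bs).mean(axis=1)
    return block_means.mean(), block_means.std(ddof=1) / np.sqrt(n_blocks)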