def set_fit_props(name, points, config, remove):
    """Assign simulations to a fit, or interactively remove them.

    Args:
        name: fit name (resolved to a directory via `fit_dir`).
        points: points (λ, β) to add; must be empty when `remove` is set.
        config: config name the points belong to; must be left at the
            default sentinel 'test' when `remove` is set.
        remove: when truthy, walk the assigned sims and ask, one by one,
            whether to drop them from the fit.

    Side effects: chdirs into the fit directory and rewrites 'sims.json'.
    """
    from os import chdir  # NOTE: unused `popen` import removed
    from os.path import basename, dirname
    import json
    from lib.utils import (fit_dir, config_dir, point_dir, dir_point,
                           authorization_request)

    if remove:
        # --remove is exclusive with points/config selection
        if points:
            print('Warning: points specification not compatible with --remove '
                  'option.')
            return
        elif config != 'test':
            print('Warning: config specification not compatible with --remove '
                  'option.')
            return

    chdir(fit_dir(name))
    try:
        with open('sims.json', 'r') as file:
            sims = json.load(file)
    except FileNotFoundError:
        sims = []

    # SIMS UPDATE
    if not remove:
        c_dir = config_dir(config)
        for Point in points:
            p_dir = c_dir + '/' + point_dir(Point)
            if p_dir not in sims:
                sims += [p_dir]
        with open('sims.json', 'w') as file:
            json.dump(sims, file, indent=4)
    # SIMS REMOTION
    else:
        new_sims = sims.copy()
        for sim in sims:
            Point = dir_point(basename(sim))
            config = basename(dirname(sim))
            what = f"to remove sim from fit '{name}'"
            extra = f"\033[38;5;80m config: '{config}'\033[0m"
            auth = authorization_request(Point=Point, what_to_do=what,
                                         extra_message=extra)
            if auth == 'quit':
                print('Nothing done for last sim.')
                return
            elif auth == 'yes':
                new_sims.remove(sim)
                # rewrite after every removal so 'quit' keeps earlier ones
                with open('sims.json', 'w') as file:
                    json.dump(new_sims, file, indent=4)
                print('Sim removed')
            else:
                print('Nothing removed.')
def sim_obs(points, config, plot, fit, exclude_torelons, exclude_bootstrap,
            fit_name, force):
    """Compute observables for the selected points.

    When `fit_name` is given, the point list is taken from that fit's
    'sims.json' (ignoring `points`/`config`); otherwise `points` within
    `config` are used. With `force` the computations are fanned out over a
    multiprocessing pool; otherwise they run sequentially and honour the
    'return'/'continue' signals from `sim_obs_compute`.
    """
    from os.path import basename, dirname, realpath
    import json
    from pprint import pprint
    from lib.utils import config_dir, dir_point, fit_dir

    if fit_name:
        f_dir = fit_dir(fit_name)
        try:
            with open(f_dir + '/sims.json', 'r') as file:
                sims = json.load(file)
        except FileNotFoundError:
            print('No simulation already assigned to this fit.')
            # bug fix: previously fell through and raised NameError on `sims`
            return
        points = []
        points_configs = {}
        for s in sims:
            if s[-1] == '/':
                s = s[:-1]
            Point = dir_point(basename(s))
            points += [Point]
            points_configs = {**points_configs, Point: realpath(dirname(s))}
        c_dir = None
    else:
        points_configs = None
        c_dir = config_dir(config)

    col = 216  # color
    print(f'Number of selected points: \033[38;5;{col}m{len(points)}\033[0m')
    print(f'\033[38;5;{col}m', end='')
    pprint(points)
    print('\033[0m')

    if not force:
        i = 0
        for Point in points:
            args = (Point, points_configs, c_dir, i, force, plot, fit,
                    exclude_torelons, exclude_bootstrap)
            ret = sim_obs_compute(args)
            if ret == 'return':
                return
            elif ret == 'continue':
                continue  # skipped points do not advance the counter
            i += 1
    else:
        import multiprocessing as mp
        i = 0
        args = []
        for Point in points:
            args += [(Point, points_configs, c_dir, i, force, plot, fit,
                      exclude_torelons, exclude_bootstrap)]
            i += 1
        with mp.Pool(mp.cpu_count() - 1) as pool:
            pool.map(sim_obs_compute, args)
def reset_fit(names, delete):
    """Reset (empty) or delete the given fits, asking confirmation for each.

    Args:
        names: fit names; a name starting with 'ยง' is treated as a regex
            over all known fits (the marker looks like mojibake for '§' —
            kept byte-identical, TODO confirm against the CLI docs).
        delete: when truthy, remove the fit directory and drop its entry
            from output/fits.json instead of recreating it empty.
    """
    from os import mkdir  # NOTE: unused `chdir`/`isdir` imports removed
    from shutil import rmtree
    from re import fullmatch
    import json
    from lib.utils import (authorization_request, fit_dir, find_fits,
                           project_folder)

    pattern_names = []
    pure_names = []
    all_names = list(find_fits().keys())
    for name in names:
        # startswith avoids IndexError on an empty name (was name[0] == ...)
        if name.startswith('ยง'):
            pattern_names += [c for c in all_names if fullmatch(name[1:], c)]
        else:
            pure_names += [name]
    names = list(set(pure_names + pattern_names))

    print(f'Chosen fits are:\n {names}')
    for name in names:
        fit = fit_dir(name)
        if delete:
            action = 'delete'
            action_p = action + 'd'
        else:
            action = 'reset'
            action_p = action
        what_to_do = 'to ' + action + ' the fit \'' + name + '\''
        authorized = authorization_request(what_to_do)
        if authorized == 'yes':
            rmtree(fit)
            if action == 'reset':
                mkdir(fit)  # recreate empty
            elif action == 'delete':
                # deregister the fit from the global registry
                with open(project_folder() + '/output/fits.json', 'r') as file:
                    fits = json.load(file)
                del fits[name]
                with open(project_folder() + '/output/fits.json', 'w') as file:
                    json.dump(fits, file, indent=4)
            print(f'Fit {name} has been {action_p}.')
        elif authorized == 'quit':
            print('Nothing done on last fit.')
            return
        else:
            print('Nothing done.')
def preplot(fit_name, kind):
    """Show (and save as pdf) a quick errorbar preview for one observable.

    `kind` selects which csv of the fit directory is plotted:
    volumes, profile correlation lengths, gauge action (+ average
    plaquette), topological susceptibility, or torelon correlation lengths.
    """
    from os import chdir
    import numpy as np
    import matplotlib.pyplot as plt
    from lib.utils import fit_dir
    import seaborn as sns

    sns.set_style('whitegrid')
    sns.set_context('talk')
    chdir(fit_dir(fit_name))

    def _columns(csv_name):
        # Load a csv as columns; warn and return None when unreadable.
        try:
            return np.genfromtxt(csv_name, unpack=True)
        except (FileNotFoundError, OSError):
            print(f"No file '{csv_name}' for fit {fit_name}")
            return None

    if kind in ['v', 'volumes']:
        cols = _columns('volumes.csv')
        if cols is None:
            return
        plt.title('Volumes:\n' + fit_name)
        plt.errorbar(cols[0], cols[2], cols[3], fmt='none', capsize=5)
        plt.savefig('pre_volumes.pdf')
        plt.show()
    elif kind in ['p', 'profiles']:
        cols = _columns('profiles_length.csv')
        if cols is None:
            return
        plt.title('Profiles Correlation Lengths:\n' + fit_name)
        plt.errorbar(cols[0], cols[2], cols[3], fmt='none', capsize=5)
        plt.savefig('pre_profiles.pdf')
        plt.show()
    elif kind in ['g', 'action', 'gauge', 'gauge-action']:
        cols = _columns('gauge_action.csv')
        if cols is None:
            return
        lam, beta = cols[0], cols[1]
        action, action_err = cols[2], cols[3]
        density, density_err = cols[4], cols[5]
        # the average coord num in 2d is fixed and it is 6
        avg_plaq = -((density * 6) / beta - 1)
        avg_plaq_err = (density_err * 6) / beta
        fig, axs = plt.subplots(2, 1)
        axs[0].set_title('Gauge action:\n' + fit_name)
        axs[0].errorbar(lam, action, action_err, fmt='none', c='tab:green',
                        capsize=5, label='action')
        axs[0].legend()
        axs[1].errorbar(lam, avg_plaq, avg_plaq_err, fmt='none',
                        capsize=5, label='average plaquette')
        axs[1].legend()
        plt.savefig('pre_gauge_action.pdf')
        plt.show()
    elif kind in ['top', 'susc', 'top-susc']:
        cols = _columns('top_susc.csv')
        if cols is None:
            return
        plt.title('Topological susceptibilities:\n' + fit_name)
        plt.errorbar(cols[0], cols[2], cols[3], fmt='none', capsize=5)
        plt.savefig('pre_top_susc.pdf')
        plt.show()
    elif kind in ['t', 'torelons']:
        cols = _columns('torelon_length.csv')
        if cols is None:
            return
        # title embeds the β value parsed from the fit name's tail
        plt.title('Torelons Correlation Lengths:\n$\\beta = '
                  + fit_name[4:] + '$')
        plt.errorbar(cols[0], cols[2], cols[3], fmt='none', capsize=5)
        plt.xlabel('$\\lambda$')
        plt.ylabel('$\\xi_T$')
        plt.tight_layout()
        plt.savefig('pre_torelons.pdf')
        plt.show()
def export_data(name, unpack):
    """Pack per-sim measures into 'data.json', or unpack it into a csv.

    Args:
        name: fit name; the function chdirs into its directory.
        unpack: falsy -> collect every assigned sim's 'measures.json' into
            'data.json'. Otherwise one of:
            'v'/'volumes', 'g'/'gauge-action', 'p'/'profiles',
            'pf'/'profiles-fit' ('pf2'/'profiles-fit2'),
            'top'/'susc'/'top-susc', 't'/'torelons',
            'tf'/'torelons-fit' ('tf2'/'torelons-fit2')
            selecting which table to extract from 'data.json'.
    """
    from os import chdir
    from os.path import basename, dirname, isfile
    import json
    import numpy as np
    from lib.utils import fit_dir, dir_point

    chdir(fit_dir(name))

    def _load_data():
        # Load packed data; warn and return None when 'data.json' is missing.
        try:
            with open('data.json', 'r') as file:
                return json.load(file)
        except FileNotFoundError:
            print("No data file (\033[38;5;80m'data.json'\033[0m) to unpack.")
            return None

    def _write_csv(file_name, header, rows):
        # Write rows (sorted, space-separated) preceded by the header line.
        with open(file_name, 'w') as file:
            file.write(header + '\n')
            for row in sorted(rows):
                file.write(' '.join(str(x) for x in row) + '\n')

    if not unpack:
        # PACK: gather every sim's measures into a single 'data.json'
        try:
            with open('sims.json', 'r') as file:
                sims = json.load(file)
        except FileNotFoundError:
            print('No simulation assigned to this fit.')
            return
        data = []
        for s in sims:
            if s[-1] == '/':
                s = s[:-1]
            config = basename(dirname(s))
            Point = dir_point(basename(s))
            point_data = {}
            if isfile(s + '/max_volume_reached'):
                print(f'\033[38;5;41m{Point}\033[0m not included in fit, '
                      'because '
                      '\033[38;5;80mmax_volume_reached\033[0m is present.')
                print(f"\033[38;5;80m config: '{config}'\033[0m")
                continue
            try:
                with open(s + '/measures.json', 'r') as file:
                    measures = json.load(file)
            except FileNotFoundError:
                print(f'\033[38;5;41m{Point}\033[0m no measure file present.')
                print(f"\033[38;5;80m config: '{config}'\033[0m")
                continue
            point_data['lambda'] = Point[0]
            point_data['beta'] = Point[1]
            point_data['config'] = config
            point_data.update(measures.copy())
            # bookkeeping keys are not observables: drop them
            for prop in ['cut', 'block', 'time']:
                try:
                    del point_data[prop]
                except KeyError:
                    pass
            data += [point_data]
            print(f'\033[38;5;41m{Point}\033[0m collected.')
        with open('data.json', 'w') as file:
            json.dump(data, file, indent=4)
    elif unpack in ['v', 'volumes']:
        data = _load_data()
        if data is None:
            return
        vol_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                volume = point_data['volume']
            except KeyError:
                continue
            vol_data += [[Point[0], Point[1], volume[0], volume[1], config]]
        _write_csv('volumes.csv', '# Lambda Beta Volume Error Config',
                   vol_data)
        print(f"\033[38;5;41m({name})\033[0m volumes from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              "\033[38;5;80m'volumes.csv'\033[0m")
    elif unpack in ['g', 'gauge-action']:
        data = _load_data()
        if data is None:
            return
        g_action_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                g_action = point_data['action']
                g_action_density = point_data['action-density']
            except KeyError:
                continue
            g_action_data += [[Point[0], Point[1],
                               g_action[0], g_action[1],
                               g_action_density[0], g_action_density[1],
                               config]]
        _write_csv('gauge_action.csv',
                   '# Lambda Beta Action Error ActionDensity Error Config',
                   g_action_data)
        print(f"\033[38;5;41m({name})\033[0m gauge actions from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              "\033[38;5;80m'gauge_action.csv'\033[0m")
    elif unpack in ['p', 'profiles']:
        data = _load_data()
        if data is None:
            return
        profile_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                profile, errors = point_data['profiles_corr']
            except KeyError:
                continue
            profile_data += [[Point[0], Point[1], config,
                              *profile, *errors]]
        # header typo fixed: was 'Profile[3:3+t}]'
        _write_csv('profiles.csv',
                   '# Lambda[0] Beta[1] Config[2] Profile[3:3+t] '
                   'Errors[3+t:3+2t]',
                   profile_data)
        print(f"\033[38;5;41m({name})\033[0m profiles from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              "\033[38;5;80m'profiles.csv'\033[0m")
    elif unpack in ['pf', 'profiles-fit', 'pf2', 'profiles-fit2']:
        data = _load_data()
        if data is None:
            return
        profile_fit_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                if unpack[-1] != '2':
                    fit_data = point_data['profiles_corr_fit']
                else:
                    # NOTE(review): hyphenated key unlike the underscore one
                    # above — confirm against the producer of 'data.json'
                    fit_data = point_data['profiles-corr-fit2']
                len_corr = fit_data['par'][0]
                err = np.sqrt(fit_data['cov'][0][0])
            except KeyError:
                continue
            profile_fit_data += [[Point[0], Point[1], len_corr, err, config]]
        if unpack[-1] != '2':
            file_name = 'profiles_length.csv'
        else:
            file_name = 'profiles_length2.csv'
        _write_csv(file_name, '# Lambda Beta Corr_Length Error Config',
                   profile_fit_data)
        print(f"\033[38;5;41m({name})\033[0m profiles fit from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              f"\033[38;5;80m'{file_name}'\033[0m")
    elif unpack in ['top', 'susc', 'top-susc']:
        data = _load_data()
        if data is None:
            return
        susc_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                susc = point_data['top-susc']
            except KeyError:
                continue
            susc_data += [[Point[0], Point[1], susc[0], susc[1], config]]
        _write_csv('top_susc.csv', '# Lambda Beta Top-Susc Error Config',
                   susc_data)
        print(f"\033[38;5;41m({name})\033[0m topological susceptibilities "
              "from \033[38;5;80m'data.json'\033[0m unpacked to "
              "\033[38;5;80m'top_susc.csv'\033[0m")
    elif unpack in ['t', 'torelons']:
        data = _load_data()
        if data is None:
            return
        torelon_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                torelon, errors = point_data['torelon-decay']
            except KeyError:
                continue
            torelon_data += [[Point[0], Point[1], config,
                              *torelon, *errors]]
        # bug fix: the sort used to sit inside the write loop (rebinding the
        # name mid-iteration), so rows were written unsorted; now sorted
        # before writing like every other table. Header typo '}]' also fixed.
        _write_csv('torelons.csv',
                   '# Lambda[0] Beta[1] Config[2] Torelon[3:3+t] '
                   'Errors[3+t:3+2t]',
                   torelon_data)
        print(f"\033[38;5;41m({name})\033[0m torelons from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              "\033[38;5;80m'torelons.csv'\033[0m")
    elif unpack in ['tf', 'torelons-fit', 'tf2', 'torelons-fit2']:
        data = _load_data()
        if data is None:
            return
        torelon_fit_data = []
        for point_data in data:
            Point = (point_data['lambda'], point_data['beta'])
            config = point_data['config']
            try:
                if unpack[-1] != '2':
                    fit_data = point_data['torelon-decay-fit']
                else:
                    fit_data = point_data['torelon-decay-fit2']
                len_corr = fit_data['par'][0]
                err = np.sqrt(fit_data['cov'][0][0])
            except KeyError:
                continue
            torelon_fit_data += [[Point[0], Point[1], len_corr, err, config]]
        if unpack[-1] != '2':
            file_name = 'torelon_length.csv'
        else:
            file_name = 'torelon_length2.csv'
        _write_csv(file_name, '# Lambda Beta Corr_Length Error Config',
                   torelon_fit_data)
        # message fix: said 'profiles fit' (copy-paste from the branch above)
        print(f"\033[38;5;41m({name})\033[0m torelons fit from "
              "\033[38;5;80m'data.json'\033[0m unpacked to "
              f"\033[38;5;80m'{file_name}'\033[0m")
def info_fit(name, kind='sims'):
    """Pretty-print the sims assigned to a fit, grouped by config.

    Args:
        name: fit name.
        kind: 's'/'sims'/None -> list the points per config;
              'o'/'obs' -> also show which measures exist per point,
              as a flag string (C=cut, B=block, V=volume).

    Raises:
        ValueError: when `kind` is not one of the recognized options.
    """
    from os import chdir
    from os.path import basename, dirname
    from pprint import pprint
    import json
    from lib.utils import fit_dir, dir_point  # unused `config_dir` removed

    if kind in ['s', 'sims', None]:
        kind = 'sims'
    elif kind in ['o', 'obs']:
        kind = 'obs'

    chdir(fit_dir(name))
    try:
        with open('sims.json', 'r') as file:
            sims = json.load(file)
    except FileNotFoundError:
        print('No simulation already assigned to this fit.')
        # bug fix: previously fell through and raised NameError on `sims`
        return

    d = {}
    for s in sims:
        if s[-1] == '/':
            s = s[:-1]
        if kind == 'sims':
            config = basename(dirname(s))
            Point = dir_point(basename(s))
            try:
                d[config] += [Point]
            except KeyError:
                d[config] = [Point]
        elif kind == 'obs':
            try:
                with open(s + '/measures.json', 'r') as file:
                    measures = json.load(file)
            except FileNotFoundError:
                measures = {}
            # summarize available measures as one-letter flags
            flags = ''
            if 'cut' in measures.keys():
                flags += 'C'
            if 'block' in measures.keys():
                flags += 'B'
            if 'volume' in measures.keys():
                flags += 'V'
            config = basename(dirname(s))
            Point = dir_point(basename(s))
            try:
                d[config] += [[Point, flags]]
            except KeyError:
                d[config] = [[Point, flags]]

    for k in d.keys():
        d[k] = sorted(d[k])

    if kind == 'sims':
        pprint(d)
    elif kind == 'obs':
        pprint(d)
    else:
        # bug fix: was a plain string with a literal '{kind}' (missing f)
        raise ValueError(f'info-fit: kind {kind} not recognized')
def fit_divergence(name, kind='volumes', reload=False):
    """Fit the divergence of an observable over a fit's points.

    Rebuilds '<kind_file>.csv' from the sims' 'measures.json' when it is
    missing or `reload` is set; otherwise reads the cached csv. Finally
    delegates to `lib.analysis.fit.fit_divergence` (which this function
    intentionally shadows via its local import).

    Raises:
        ValueError: when `kind` is not a recognized observable.
    """
    from os import chdir
    from os.path import basename, dirname, isfile
    from datetime import datetime
    import json  # unused `pprint` import removed
    from numpy import genfromtxt
    from lib.utils import fit_dir, dir_point
    from lib.analysis.fit import fit_divergence

    if kind in ['v', 'volumes']:
        kind = 'volumes'
        kind_file = 'volumes'
    elif kind in ['p', 'profiles']:
        kind = 'profiles'
        kind_file = 'profiles_length'
    elif kind in ['t', 'torelons']:
        kind = 'torelons'
        # bug fix: was 'torelons_length', but export_data/preplot use
        # 'torelon_length.csv' (singular), so the cached csv was never found
        kind_file = 'torelon_length'
    elif kind in ['g', 'gauge-action']:
        kind = 'gauge-action'
        kind_file = 'gauge_action'
    elif kind in ['top', 'susc', 'top-susc']:
        kind = 'topological-susceptibility'
        kind_file = 'top_susc'
    else:
        raise ValueError(f'{kind} not available for divergence fit.')

    fit_d = fit_dir(name)
    chdir(fit_d)
    try:
        with open('sims.json', 'r') as file:
            sims = json.load(file)
    except FileNotFoundError:
        print('No simulation already assigned to this fit.')
        # do not return, because if 'kind.csv' is present it can use that;
        # bug fix: default to no sims instead of NameError below
        sims = []

    if not isfile(f'{kind_file}.csv') or reload:
        # recompute the table from each sim's measures
        d = {}
        lambdas = []
        betas = []
        means = []
        errors = []
        for s in sims:
            if s[-1] == '/':
                s = s[:-1]
            config = basename(dirname(s))
            Point = dir_point(basename(s))
            if isfile(s + '/max_volume_reached'):
                print(f'\033[38;5;41m{Point}\033[0m not included in fit, '
                      'because '
                      '\033[38;5;80mmax_volume_reached\033[0m is present.')
                print(f"\033[38;5;80m config: '{config}'\033[0m")
                continue
            try:
                with open(s + '/measures.json', 'r') as file:
                    measures = json.load(file)
            except FileNotFoundError:
                measures = {}
            with open(s + '/state.json', 'r') as file:
                state = json.load(file)
            if 'time' in measures.keys():
                s_time = datetime.strptime(state['end_time'],
                                           '%d-%m-%Y %H:%M:%S')
                m_time = datetime.strptime(measures['time'],
                                           '%d-%m-%Y %H:%M:%S')
            else:
                # typo fix: was 'Mising time'
                print(f'Missing time in {Point}, in config: {config}.')
                return
            if s_time > m_time:
                print('\033[38;5;203mWarning:\033[0m in Point '
                      f'\033[38;5;41m{Point}\033[0m in '
                      f"\033[38;5;80mconfig: '{config}'\033[0m measures are "
                      '\033[38;5;210mnot up to date\033[0m '
                      'with last simulation\'s data')
                print()
            d[Point] = {'config': config, **measures,
                        'time_sim_end': state['end_time']}
            # measures key is the kind minus its trailing 's' ('volume', ...);
            # NOTE(review): for 'gauge-action'/'topological-susceptibility'
            # this yields 'gauge-actio'/... — confirm the measures schema
            k_key = kind[:-1]
            if k_key in measures.keys():
                lambdas += [Point[0]]
                betas += [Point[1]]
                means += [measures[k_key][0]]
                errors += [measures[k_key][1]]
            else:
                print(f"Missing {k_key} in {Point}, in config: {config}.")
                return
        with open(f'{kind_file}.csv', 'w') as file:
            # header says 'Volume' for every kind (kept as-is)
            file.write('# Lambda Beta Volume Error Config\n')
            for Point, attr in d.items():
                mean, err = attr[kind[:-1]]
                data = [Point[0], Point[1], mean, err, attr['config']]
                line = ' '.join([str(x) for x in data])
                file.write(line + '\n')
    else:
        data = genfromtxt(f'{kind_file}.csv', unpack=True)
        lambdas, betas = data[:2]
        if kind == 'gauge-action':
            means, errors = data[4:6]
        else:
            means, errors = data[2:4]

    fit_divergence(lambdas, means, errors, betas, kind=kind)