def local_launch(points, arg_strs):
    """Launch simulations locally, one detached process per point.

    For every point: run the make script in the point's directory, then
    start the launch script detached via `nohup ... &`.

    Parameters
    ----------
    points : list
        (λ, β) points to launch.
    arg_strs : dict
        Point -> argument string for the launch script (the second token
        is the run number).
    """
    from os import system, chdir
    from os.path import abspath
    from subprocess import Popen
    from lib.utils import (point_dir, launch_script_name, make_script_name,
                           project_folder)

    # Resolve every target directory up front, before any chdir happens.
    abs_dirs = {p: abspath(point_dir(p)) for p in points}

    for p in points:
        chdir(abs_dirs[p])
        args = arg_strs[p]
        # The second token of the argument string is the run number.
        run_number = int(args.split()[1])
        maker = Popen([
            "python3",
            make_script_name(p),
            str(run_number),
            str(p[0]),
            str(p[1]),
            project_folder(),
        ])
        maker.wait()
        # Detach the actual simulation launcher from this process.
        system('nohup python3 $PWD/' + launch_script_name(p) + args + ' &')
def up_launch(points_old, config, both, make, force):
    """Refresh the launch and/or make scripts of existing sim folders.

    Skips points that are currently running. Outside the 'test' config an
    interactive confirmation is requested unless `force` is set; answering
    'quit' aborts the whole update.

    Parameters
    ----------
    points_old : list
        Existing (λ, β) points whose scripts should be refreshed.
    config : str
        Config the points belong to.
    both, make : bool
        `both` updates both scripts; otherwise `make` selects which one.
    force : bool
        Skip the interactive confirmation.
    """
    from os import getcwd, chdir
    from shutil import copyfile
    from lib.utils import (find_running, authorization_request, point_dir,
                           launch_script_path, make_script_path,
                           launch_script_name, make_script_name)

    root = getcwd()
    running, _ = find_running()
    running = [entry[0] for entry in running if entry[1] == config]

    for point in points_old:
        if point in running:
            continue
        # Confirmation is required outside 'test' config, unless forced.
        if config != 'test' and not force:
            answer = authorization_request("to update launch/make scripts",
                                           point)
        else:
            answer = True
        if answer == 'quit':
            return
        if not answer:
            continue
        chdir('output/' + config + '/' + point_dir(point))
        if both or make:
            copyfile(make_script_path(), make_script_name(point))
        if both or not make:
            copyfile(launch_script_path(), launch_script_name(point))
        print('Update complete for (λ, β) = ' + str(point))
        chdir(root)
def volumes_plot(configs=None, path=None):
    """Plot mean volume versus λ, one curve per (config, β) pair.

    Sims that reached the maximum volume (flag file `max_volume_reached`
    in their folder) are excluded from the curves.

    Parameters
    ----------
    configs : list, optional
        Forwarded to `mean_volumes`.
    path : str, optional
        Unused; kept for interface compatibility.
    """
    from os.path import isfile
    import matplotlib.pyplot as plt
    from lib.utils import point_dir, config_dir

    mean_vols = mean_volumes(configs, print_flag=False)

    # curve label -> list of (λ, mean volume) pairs
    curves = {}
    for conf, sims in mean_vols.items():
        for point, volume in sims.items():
            flag = (config_dir(conf) + '/' + point_dir(point) +
                    '/max_volume_reached')
            if isfile(flag):
                continue  # sims that hit the volume cap are skipped
            label = conf + ' β=' + str(point[1])
            curves.setdefault(label, []).append((point[0], volume))

    for label, pairs in curves.items():
        pairs.sort(key=lambda pair: pair[0])
        plt.plot(*zip(*pairs), marker='o', label=label)

    plt.xlabel('λ')
    plt.ylabel('Volume')
    plt.legend()
    plt.show()
def set_fit_props(name, points, config, remove):
    """Add sims to a fit's `sims.json`, or interactively remove them.

    Fix: dropped the unused function-local `popen` import.

    Parameters
    ----------
    name : str
        Name of the fit (folder resolved by `fit_dir`).
    points : list
        (λ, β) points whose sim dirs are added to the fit; must be empty
        when `remove` is set.
    config : str
        Config the points belong to; must stay 'test' when `remove` is set.
    remove : bool
        If True, interactively remove sims from the fit instead of adding.
    """
    from os import chdir
    from os.path import basename, dirname
    import json
    from lib.utils import (fit_dir, config_dir, point_dir, dir_point,
                           authorization_request)

    # --remove is incompatible with an explicit points/config selection
    if remove:
        if points:
            print('Warning: points specification not compatible with --remove '
                  'option.')
            return
        elif config != 'test':
            print('Warning: config specification not compatible with --remove '
                  'option.')
            return

    chdir(fit_dir(name))
    try:
        with open('sims.json', 'r') as file:
            sims = json.load(file)
    except FileNotFoundError:
        sims = []

    # SIMS UPDATE
    if not remove:
        c_dir = config_dir(config)
        for Point in points:
            p_dir = c_dir + '/' + point_dir(Point)
            if p_dir not in sims:
                sims += [p_dir]
        with open('sims.json', 'w') as file:
            json.dump(sims, file, indent=4)
    # SIMS REMOTION
    else:
        new_sims = sims.copy()
        for sim in sims:
            Point = dir_point(basename(sim))
            config = basename(dirname(sim))
            what = f"to remove sim from fit '{name}'"
            extra = f"\033[38;5;80m config: '{config}'\033[0m"
            auth = authorization_request(Point=Point, what_to_do=what,
                                         extra_message=extra)
            if auth == 'quit':
                print('Nothing done for last sim.')
                return
            elif auth == 'yes':
                # persist after every removal, so a later 'quit' loses nothing
                new_sims.remove(sim)
                with open('sims.json', 'w') as file:
                    json.dump(new_sims, file, indent=4)
                print('Sim removed')
            else:
                print('Nothing removed.')
def sim_info(Point, config):
    """Pretty-print the `state.json` of the simulation at (λ, β) = Point.

    Parameters
    ----------
    Point : tuple
        (λ, β) pair identifying the simulation folder.
    config : str
        Config the point belongs to.
    """
    import json
    from os import chdir
    from lib.utils import point_dir, config_dir

    chdir(config_dir(config) + '/' + point_dir(Point))
    with open('state.json', 'r') as state_file:
        content = json.load(state_file)
    # Strip the outermost braces for a cleaner console dump.
    print(json.dumps(content, indent=4)[1:-1])
def clear_data(points, config='test', force=False):
    """Remove the simulation folders of the requested points.

    Running simulations are never removed. Outside the 'test' config an
    interactive confirmation is requested unless `force` is set.

    Fixes: docstring documented a nonexistent `Point : float` parameter;
    dropped the unused `point_str` import; simplified the final
    `all(...)`/if-else into a single `not any(...)` return.

    Parameters
    ----------
    points : list
        (λ, β) points whose data folders should be removed.
    config : str, optional
        Config the points belong to (default 'test').
    force : bool, optional
        Skip the interactive confirmation.

    Returns
    -------
    bool or None
        True if no simulation folder is left in the config dir, False
        otherwise; None when the loop is aborted by a 'quit' answer.

    Raises
    ------
    ValueError
        If a requested simulation folder does not exist.
    """
    from os import scandir
    from shutil import rmtree
    from lib.utils import (find_all_availables, find_running,
                           authorization_request, config_dir, point_dir)

    points_run, _ = find_running()
    points_run = [x[0] for x in points_run if x[1] == config]

    # running sims cannot be cleared
    points_req_run = [x for x in points if x in points_run]
    points_clearable = [x for x in points if x not in points_run]

    if len(points_req_run) > 0:
        print("Simulations for following λ are running: ", points_req_run,
              '\n so they are not clearable')
    if len(points_req_run) > 0 and len(points_clearable) > 0:
        print()
    if len(points_clearable) == 0:
        print("No λ found in the requested range.")

    for Point in points_clearable:
        try:
            if not config == 'test' and not force:
                what_to_do = "to remove simulation folder"
                authorized = authorization_request(what_to_do, Point)
            else:
                authorized = 'yes'
            if authorized == 'yes':
                rmtree(config_dir(config) + "/" + point_dir(Point))
                if force:
                    print("(λ = ", Point[0], ", β = ", Point[1], ") ",
                          sep='', end='')
                print("Simulation folder removed.")
            elif authorized == 'quit':
                print(f'Nothing done for last point {Point}.')
                return
        except FileNotFoundError:
            all_points = find_all_availables()
            raise ValueError("A folder with the given point doesn't exist" +
                             "\n\t\t\t all_points: " + str(all_points))

    # True when no 'Beta*' folder survives in the config directory
    return not any('Beta' in str(x) for x in scandir(config_dir(config)))
def recovery(points_old, points_new, config, force, very_forced):
    """Run the recovery procedure on existing simulation folders.

    Parameters
    ----------
    points_old : list
        Existing (λ, β) points whose histories will be recovered.
    points_new : list
        Requested points with no simulation folder; only reported.
    config : str
        Config the points belong to.
    force : bool
        Skip the interactive confirmation (outside the 'test' config).
    very_forced : bool
        Forwarded to `recovery_history`.
    """
    from os import getcwd, chdir
    from lib.tools import recovery_history
    from lib.utils import authorization_request, point_dir

    if len(points_new) > 0:
        print("Following (λ, β) not found: ", points_new)
    if len(points_old) > 0:
        print()

    proj_dir = getcwd()
    for Point in points_old:
        if not config == 'test' and not force:
            what_to_do = "to recovery simulation data"
            authorized = authorization_request(what_to_do, Point)
        else:
            authorized = True
        # BUG FIX: 'quit' is a truthy string, so it used to fall through and
        # recover anyway; now it aborts, as in the other commands.
        if authorized == 'quit':
            return
        if authorized:
            # BUG FIX: a '/' was missing between the config and the point
            # dir (cf. up_launch), so chdir targeted a nonexistent path.
            chdir('output/' + config + '/' + point_dir(Point))
            recovery_history(very_forced)
            chdir(proj_dir)
def slurm_launch(points, arg_strs, queue, arch, file):
    """Launch simulations through the SLURM scheduler, one job per chunk.

    Points are split into chunks of `c_sz` (cores per node of the chosen
    architecture). For every chunk the make scripts are run locally, an
    sbatch script is rendered from `lib/scripts/sbatch.sh` and submitted.

    Parameters
    ----------
    points : list
        (λ, β) points to launch.
    arg_strs : dict
        Point -> argument string for the launch script (its second token
        is the run number).
    queue : str
        'p'/'prod' for the production queue, 'd'/'dbg'/'debug' for debug.
    arch : str
        'skl' or 'knl'; selects the accounting project and chunk size.
    file : str
        Path of the points file; its basename (minus a 4-char extension)
        names the sbatch output directory.

    Raises
    ------
    RuntimeError
        If `queue` or `arch` is not recognized.
    """
    from os import system, chdir, chmod, makedirs
    from os.path import realpath, basename
    from subprocess import Popen
    from time import time
    from datetime import datetime
    from lib.utils import (point_dir, launch_script_name, make_script_name,
                           project_folder)

    # raise RuntimeError('support for marconi still missing')

    # map queue aliases to the SLURM partition and its wall-time limit
    # (qtime is consumed by the sbatch template via the eval below)
    if queue in ['p', 'prod']:
        queue = 'prod'
        qtime = '23:59'
    elif queue in ['d', 'dbg', 'debug']:
        queue = 'dbg'
        qtime = '00:29'
    else:
        raise RuntimeError('slurm_launch: queue not recognized')

    # architecture selects the accounting project and the cores per node
    # (account is also consumed by the sbatch template)
    if arch == 'skl':
        account = 'INF19_npqcd_0'
        c_sz = 48  # chunk_size
    elif arch == 'knl':
        account = 'IscrB_TOPPSI'
        c_sz = 68  # chunk_size
    else:
        raise RuntimeError('slurm_launch: arch not recognized')

    # resolve all point dirs before any chdir
    dirs = {Point: realpath(point_dir(Point)) for Point in points}

    # NOTE(review): when len(points) is an exact multiple of c_sz this also
    # produces a trailing empty chunk (and an empty sbatch job) — confirm
    # whether that is intended.
    points_chunks = [
        points[c_sz * i:c_sz * (i + 1)]
        for i in range(len(points) // c_sz + 1)
    ]
    i = 0
    for chunk in points_chunks:
        i += 1

        def make_str(p):
            # make-script invocation: script name + run number, λ, β, project
            run_num = int(arg_strs[p].split()[1])
            make_args = str(run_num) + ' ' + str(p[0]) + ' ' + str(p[1]) + \
                ' ' + project_folder()
            return make_script_name(p) + ' ' + make_args

        def launch_str(p):
            # launch-script invocation (arg_strs entries start with a space)
            return launch_script_name(p) + arg_strs[p]

        points_makers = [make_str(p) for p in chunk]
        points_launchers = [launch_str(p) for p in chunk]

        # build the bash array of per-point variables embedded in the sbatch
        # template; note `points` is rebound to a string here (the original
        # list is no longer needed: all chunks are already computed above)
        points = ['(']
        for j in range(len(chunk)):
            points += [
                '"point_dir=\'' + point_dir(chunk[j]) + '\' ' +
                # 'make=\'' + points_makers[j] + '\' ' +
                'launch=\'' + points_launchers[j] + '\'"'
            ]
            chdir(dirs[chunk[j]])
            # run the make script now, on the submission node
            make_script = Popen(["python3", *points_makers[j].split()])
            make_script.wait()
        points += [')']
        points = '\n'.join(points)

        file_name = basename(file)
        sbatch_dir = file_name[:-4]  # drop the extension (assumed 4 chars)
        time_ = datetime.fromtimestamp(time()).strftime('%d-%m-%Y_%H:%M:%S')
        jobname = f'CDT2D_{time_}--{i}'
        scripts_dir = project_folder() + '/lib/scripts'
        # NOTE(review): the template is rendered by eval-ing it as an
        # f-string, so it may reference any local defined above (queue,
        # qtime, account, points, jobname, ...). Only trusted template
        # files must ever be placed in lib/scripts.
        with open(scripts_dir + '/sbatch.sh', 'r') as sbatch_template:
            chunk_script = eval('f"""' + sbatch_template.read() + '"""')

        # if queue == 'dbg':
        # re-invoke the launcher at the end of the job, unless the file
        # name is marked with a trailing '~~~'
        if file[-3:] != '~~~':
            chunk_script += (f'\n\npython3 {project_folder()}/launcher.py '
                             f'run --file {file}')

        try:
            makedirs('../' + sbatch_dir)
        except FileExistsError:
            pass
        chdir('../' + sbatch_dir)

        sbatch_file = realpath(jobname + '.sh')
        with open(sbatch_file, 'w') as sbatch_script:
            sbatch_script.write(chunk_script)
        chmod(sbatch_file, 0o777)
        system('sbatch ' + sbatch_file)
def launch(points_old, points_new, config, linear_history, end_time, end_steps,
           force, time_lengths, adj, max_volume, move22, move24, move_gauge,
           fake_run, debug, queue, arch, file):
    """Prepare and launch CDT_2D simulations for the requested points.

    For every point it prepares (or reuses) the simulation folder, builds
    the argument string for the launch script and finally hands all the
    points to `lib.platforms.launch_run` (unless `fake_run` is set).

    Parameters
    ----------
    points_old : list
        Points that already have a simulation folder (rerun candidates).
    points_new : list
        Points to be simulated for the first time.
    config : str
        Config the points belong to.
    linear_history : str
        Linear-history spec; '0' disables it.
    end_time : str
        Wall-time end condition (used when `end_steps` is '0').
    end_steps : str
        Step-count end condition; '0' disables it.
    force : bool
        Skip interactive confirmations (outside the 'test' config).
    time_lengths : dict
        Point -> time extension of the triangulation.
    adj : bool
        Record adjacencies; forwarded to C++ as 'true'/'false'.
    max_volume : int or list
        Maximum volume allowed (a 1-element list is unwrapped).
    move22, move24, move_gauge
        Weights of the Monte Carlo moves (normalized by `moves_weights`).
    fake_run : bool
        Only create folders and print the command, do not launch.
    debug : bool
        Debug flag; forwarded to C++ as 'true'/'false'.
    queue, arch, file
        Forwarded to the platform launcher (cluster submission).
    """
    from os import mkdir, chdir, getcwd, scandir
    from os.path import isfile, isdir
    from shutil import copyfile
    from re import split, sub
    from platform import node
    import json
    from lib.utils import (find_running, point_dir, point_str, moves_weights,
                           authorization_request, end_parser,
                           launch_script_name, make_script_name, config_dir,
                           project_folder)
    from lib.platforms import launch_run

    # set moves' weights
    move22, move24, move_gauge = moves_weights(move22, move24, move_gauge)

    points_run, _ = find_running()
    points_run = [x[0] for x in points_run if x[1] == config]

    points_old_auth = []  # old ones which will get the authorization to rerun
    points_req_run = []  # those requested which are already running
    for Point in points_old:
        if Point not in points_run:
            if not config == 'test' and not force:
                what_to_do = "to rerun simulation"
                authorized = authorization_request(what_to_do, Point)
            else:
                authorized = 'yes'
            if authorized == 'yes':
                points_old_auth += [Point]
            elif authorized == 'quit':
                print('No simulation launched.')
                return
        else:
            points_req_run += [Point]

    points = points_old_auth + points_new

    if len(points_new) > 0:
        print("New simulations will be launched for following (λ, β): ",
              points_new)
    if len(points_old_auth) > 0:
        print("Old simulations will be rerunned for following (λ, β): ",
              points_old_auth)
    if len(points_req_run) > 0:
        print("Simulations for following (λ, β) were already running: ",
              points_req_run)
    if len(points) > 0:
        print()

    arg_strs = {}
    for Point in points:
        chdir(config_dir(config))
        dir_name = point_dir(Point)
        launch_script_n = launch_script_name(Point)
        make_script_n = make_script_name(Point)

        if Point in points_old:
            # ----- rerun of an existing simulation -----
            if not isdir(dir_name + "/history/adjacencies"):
                mkdir(dir_name + "/history/adjacencies")
            with open(dir_name + "/state.json", "r+") as state_file:
                state = json.load(state_file)
            if state['is_thermalized']:
                # "Ha già finito!" = "it has already finished!"
                print('((λ, β) = ' + str(Point) + ') Ha già finito!')
                # @todo to be improved
                continue
            if state['last_run_succesful']:
                run_num = state['run_done'] + 1
            else:
                print('((λ, β) = ' + str(Point) + ') Problem in the last run')
                continue
            if state['is_thermalized'] and linear_history == '0':
                linear_history = '1M'
            # I'm putting the default because this case is present only for
            # backward compatibility, and before the timelength was stuck to 80
            try:
                time_length = state['timelength']
            except KeyError:
                time_length = time_lengths[Point]

            # checkpoints of the previous run (skip incomplete '.tmp' files)
            checkpoints = [
                x.name for x in scandir(dir_name + "/checkpoint")
                if (split('_|\.|run', x.name)[1] == str(run_num - 1)
                    and x.name[-4:] != '.tmp')
            ]
            # in the sort, '.' has to be replaced with '~' (or in general
            # with any character that sorts after '_'), otherwise
            # 'run1.1_...' would come before 'run1_...'
            checkpoints.sort(key=lambda s: s.replace('.', '~'))
            last_check = checkpoints[-1]
        else:
            # ----- brand new simulation folder -----
            mkdir(dir_name)
            mkdir(dir_name + "/checkpoint")
            mkdir(dir_name + "/history")
            mkdir(dir_name + "/history/adjacencies")
            mkdir(dir_name + "/bin")
            make_template = project_folder() + '/lib/scripts/make_script.py'
            launch_template = project_folder(
            ) + '/lib/scripts/launch_script.py'
            copyfile(make_template, dir_name + '/' + make_script_n)
            copyfile(launch_template, dir_name + '/' + launch_script_n)
            if fake_run:
                print('Created simulation directory for: (Lambda= ' +
                      str(Point[0]) + ', Beta= ' + str(Point[1]) + ')')
            run_num = 1
            last_check = None
            time_length = time_lengths[Point]

        # it has to be done here because before this point it is not
        # guaranteed that dir_name exists ('mkdir(dir_name)')
        chdir(dir_name)

        if isfile('max_volume_reached'):
            print(f'Point {Point} won\'t be relaunched because it reached '
                  'maximum volume available in the previous run.')
            continue

        if int(run_num) > 1:
            from lib.tools import recovery_history
            recovery_history()
            if linear_history != '0' and not state['linear-history']:
                state['linear-history-cut'] = state['iter_done']
            if state['linear-history']:
                if linear_history == '0':
                    print('\033[38;5;69mWarning:\033[0m')
                    print(f"Point {Point} has been already run with "
                          f"linear_history {state['linear-history']}, so this "
                          f"value will be used.")
                elif linear_history != state['linear-history']:
                    print('\033[38;5;69mWarning:\033[0m')
                    print(f"Point {Point} has been already run with "
                          f"linear_history {state['linear-history']}, so this "
                          f"will be used instead of: {linear_history}.")
                # NOTE(review): `linear_history` is rebound here and leaks
                # into the following iterations of the points loop — confirm
                # this cross-point carry-over is intended.
                linear_history = state['linear-history']

        # ensure state_file existence or update it
        if int(run_num) == 1:
            state = {
                'Lambda': Point[0],
                'Beta': Point[1],
                'run_done': 0,
                'is_thermalized': False,
                'last_checkpoint': None,
                'iter_done': 0,
                'timelength': time_length
            }
        with open('state.json', 'w') as state_file:
            json.dump(state, state_file, indent=4)

        # END CONDITION MANIPULATION
        # the two variables are merged back into one, because it is still
        # easy to tell them apart from the last character
        if end_steps != '0':
            end_condition = end_steps
        else:
            end_condition = end_time

        # needed for thermalization loop
        end_partial, end_condition, end_type = end_parser(end_condition)
        if linear_history != '0':  # i.e. `if linear_history:`
            if end_type == 'time':
                end_partial = str(end_condition) + 's'
            else:
                end_partial = end_condition

        # set debug_flag for c++ (in c++ style)
        debug_flag = str(debug).lower()

        # set adj_flag for c++ (in c++ style)
        adj_flag = str(adj).lower()

        # max_volume
        max_volume = int(max_volume[0] if type(max_volume) == list
                         else max_volume)

        # is necessary to recompile each run because on the grid the launch
        # node could be different from run_node
        exe_name = "CDT_2D-" + point_str(Point)  # + "_run" + str(run_num)

        # positional arguments consumed by the launch script
        arguments = [
            project_folder(), run_num, Point[0], Point[1], time_length,
            end_condition, debug_flag, last_check, linear_history, adj_flag,
            move22, move24, max_volume, end_partial, end_type, exe_name
        ]
        arg_str = ''
        for x in arguments:
            arg_str += ' ' + str(x)
        arg_strs[Point] = arg_str

        if fake_run:
            print()
            print(*(["bin/" + exe_name] + arguments[:8]))

    if not fake_run:
        from lib.platforms import launch_run
        points = list(arg_strs.keys())
        launch_run(points, arg_strs, config, queue, arch, file)
def plot(points_old, config, gauge=False):
    """Show the volume (or average-plaquette) history of each point.

    One matplotlib figure per point. Pressing F5 on a (non-gauge) figure
    reloads any rows appended to the history file since the last draw.

    Parameters
    ----------
    points_old : list
        Existing (λ, β) points to plot.
    config : str
        Config the points belong to.
    gauge : bool, optional
        If True plot 6*S_g/(V*β) from gauge.txt instead of the volume.
    """
    from matplotlib.pyplot import figure, show
    import numpy as np
    from lib.utils import find_running, point_dir, config_dir

    points_run, _ = find_running()
    points_run = [x[0] for x in points_run if x[1] == config]

    # fixed palette, cycled over the figures
    color_cycle = [
        'xkcd:carmine', 'xkcd:teal', 'xkcd:peach', 'xkcd:mustard',
        'xkcd:cerulean'
    ]
    n_col = len(color_cycle)

    canvases = []
    props = []  # per-figure refresh state used by the F5 handler
    i = -1
    for Point in points_old:
        i += 1
        Lambda = Point[0]
        Beta = Point[1]
        if not gauge:
            vol_file = (config_dir(config) + '/' + point_dir(Point) +
                        '/history/volumes.txt')
            indices, volumes = np.loadtxt(vol_file, unpack=True)
            file = vol_file
        else:
            vol_file = (config_dir(config) + '/' + point_dir(Point) +
                        '/history/volumes.txt')
            v_indices, volumes = np.loadtxt(vol_file, unpack=True)
            gauge_file = (config_dir(config) + '/' + point_dir(Point) +
                          '/history/gauge.txt')
            g_indices, gauge_action, topological_charge, \
                av_contr = np.loadtxt(gauge_file, unpack=True)
            # align the volume samples with the gauge samples
            v_indices = tuple([np.searchsorted(v_indices, g_indices)])
            indices = g_indices
            # average plaquette: 6 * S_g / (V * β)
            volumes = 6 * (gauge_action / volumes[v_indices]) / Beta
            file = gauge_file

        fig = figure()
        ax = fig.add_subplot(111)
        # 'skip' counts rows already plotted, so F5 only loads the new ones
        props += [{'fig': fig, 'ax': ax, 'skip': len(volumes), 'file': file}]
        canvases += [fig.canvas]

        fig.set_size_inches(7, 5)
        ax.plot(indices, volumes, color=color_cycle[i % n_col])

        if Point in points_run:
            run_t = ', running'
        else:
            run_t = ''
        title = 'λ = ' + str(Point[0]) + ', β = ' + str(Point[1]) + run_t
        if gauge:
            title = 'average plaquette\n' + title
        ax.set_title(title)

        def on_key(event):
            # F5 handler: append any new rows of the history file to the
            # figure whose canvas received the event
            i = canvases.index(event.canvas)
            p = props[i]
            if event.key == 'f5':
                try:
                    ind_aux, vol_aux = np.loadtxt(p['file'], unpack=True,
                                                  skiprows=p['skip'])
                except ValueError:
                    ind_aux = []
                    vol_aux = []
                try:
                    # redraw only when enough new rows accumulated
                    if len(vol_aux) > 10:
                        props[i]['skip'] += len(vol_aux)
                        p['ax'].plot(ind_aux, vol_aux,
                                     color=color_cycle[i % n_col])
                        p['fig'].canvas.draw()
                except TypeError:
                    pass

        if not gauge:
            fig.canvas.mpl_connect('key_press_event', on_key)
    show()
def stop(points_old, config, is_all, pids, force):
    """Ask running simulations to stop, or kill processes by PID.

    If `pids` is given, the matching processes (and their non-init parents)
    are killed directly and the function returns. Otherwise a 'stop' file
    with the current timestamp is written in every running point's folder
    and the updated set of stopped points is persisted in 'pstop.pickle'.

    Parameters
    ----------
    points_old : list
        Existing (λ, β) points to stop.
    config : str
        Config the points belong to.
    is_all : bool
        When True, suppress the "were not running" report.
    pids : list
        PIDs to kill directly instead of stopping points.
    force : bool
        Kill a PID even if it is not a tracked simulation.
    """
    # @todo: distinguish between 'data' and 'test' processes
    # it is done with a config argument coming from args.is_data
    from os import environ, system, popen
    import pickle
    from lib.utils import find_running, point_dir, config_dir

    points_run, sim_info = find_running()
    points_run = [x[0] for x in points_run if x[1] == config]
    # if config or not is_all:
    #     if not config:
    #         config = 'test'
    #     points_run = [x[0] for x in points_run if x[1] == config]
    # else:
    #     points_run = [x[0] for x in points_run]
    running_pids = [ps_i[2] for ps_i in sim_info]

    if pids:
        for pid in pids:
            if pid in running_pids or force:
                # scan the user's process table for the PID or its children
                ps_out = popen('ps -fu ' + environ['USER']).read().split('\n')
                for line in ps_out[1:-1]:
                    infos = line.split()
                    ps_pid = int(infos[1])
                    ps_ppid = int(infos[2])
                    if ps_pid == pid or ps_ppid == pid:
                        # kill the parent first (unless it is init)
                        if ps_ppid != 1:
                            system('kill ' + str(ps_ppid))
                        system('kill ' + str(ps_pid))
            else:
                print(f'Invalid PID: {pid}, ' +
                      'there is no running simulation with that one.')
        return

    # previously stopped points, pruned of those no longer running
    try:
        with open(config_dir(config) + '/pstop.pickle', 'rb') as stop_file:
            points_stopped = pickle.load(stop_file)
    except FileNotFoundError:
        points_stopped = []
    l_aux = []
    for Point in points_stopped:
        if Point in points_run:
            l_aux += [Point]
    points_stopped = l_aux

    points_stopping = []
    points_notstop = []
    for Point in points_old:
        if Point in points_run and Point not in points_stopped:
            points_stopped += [Point]
            points_stopping += [Point]
            from os import system
            from time import time
            from datetime import datetime
            sf = '%d-%m-%Y %H:%M:%S'
            # the simulation polls for this 'stop' file and shuts down;
            # the file content is the stop timestamp
            system('echo ' + datetime.fromtimestamp(time()).strftime(sf) +
                   ' > ' + config_dir(config) + '/' + point_dir(Point) +
                   '/stop')
            # maybe '.stop' instead of 'stop'
        else:
            points_notstop += [Point]

    with open(config_dir(config) + '/pstop.pickle', 'wb') as stop_file:
        pickle.dump(points_stopped, stop_file)

    if len(points_stopping) > 0:
        print("Simulations for following (λ, β) just stopped: ",
              points_stopping)
    if len(points_notstop) > 0 and not is_all:
        print("Simulations for following (λ, β) were not running: ",
              points_notstop)
def show_state(configs, full_show=False):
    """Print a table of the currently running simulations.

    Parameters
    ----------
    configs : str or list
        Config(s) to report on; a bare string is wrapped in a list. A
        synthetic '-' config collects sims whose config is unknown.
    full_show : str, optional
        '0' basic columns; '1' adds RUN_ID/PID/PPID; '2' adds
        linear-history, start time and disk usage.
    """
    # @todo: add support for the other platforms
    # @todo: for clusters: add 'pending' state
    import pickle
    import json
    from os import environ, popen
    from platform import node
    from datetime import datetime
    from time import time
    from lib.utils import find_running, point_dir, config_dir
    from lib.platforms import get_ps_out

    if not type(configs) == list:
        configs = [configs]

    # if node() == 'Paperopoli' or node() == 'fis-delia.unipi.it':
    ps_out = get_ps_out()
    # else:
    #     ps_out = []
    #     print("This platform is still not supported")
    #     return

    # no ps line mentioning the CDT_2D binary -> nothing is running
    empty = len(['' for line in ps_out if ' bin/CDT_2D-Lambda' in line]) == 0
    if len(ps_out) > 1 and not empty:
        print(' LAMBDA-λ BETA-β TIME STATUS CONFIG', end='')
        if full_show == '0':
            print()
        elif full_show == '1':
            print(' RUN_ID PID PPID')
        elif full_show == '2':
            print(' LIN_HIST START DISK US.')
        points_run_all, sim_all = find_running()
        configs += ['-']
        # group the (point, sim) pairs by config
        d = {}
        for config in configs:
            points_run_list = []
            sim_list = []
            for i in range(len(sim_all)):
                try:
                    if points_run_all[i][1] == config:
                        points_run_list += [points_run_all[i]]
                        sim_list += [sim_all[i]]
                except IndexError:
                    # It's possible that find_running can't retrieve the
                    # config for some simulations, these will be managed after
                    pass
            d[config] = points_run_list, sim_list
        # for i in range(len(sim_all)):
        #     if len(points_run_all[i]) == 1:
        #         points_run_list += [points_run_all[i] + ['-']]
        #         sim_list += [sim_all[i]]
        # d['-'] = points_run_list, sim_list
    else:
        print("There are no running simulations currently.")
        return

    for config in configs:
        # stopped points previously persisted for this config
        try:
            if config == '-':
                raise FileNotFoundError
            with open(config_dir(config) + '/pstop.pickle',
                      'rb') as stop_file:
                points_stopped = pickle.load(stop_file)
        except FileNotFoundError:
            points_stopped = []

        if len(ps_out) > 1 and not empty:
            points_run_list, sim_list = d[config]
            for i in range(0, len(sim_list)):
                points_run = points_run_list[i]
                sim = sim_list[i]
                Point = points_run[0]
                l_conf = points_run[1]
                if Point in points_stopped:
                    state = 'killed'
                else:
                    state = 'running'
                # r_f is a fixed-width formatting helper defined elsewhere
                # in this module
                print(r_f(str(Point[0]), 9), r_f(str(Point[1]), 9),
                      sim[0].rjust(11), ' ', state.ljust(10),
                      l_conf.ljust(11), end='')
                if full_show == '0':
                    print()
                elif full_show == '1':
                    print(sim[1].rjust(4), ' ', sim[2].ljust(8),
                          sim[3].ljust(8))
                elif full_show == '2':
                    if config != '-':
                        Point_dir = config_dir(config) + '/' + \
                            point_dir(Point) + '/'
                        with open(Point_dir + 'state.json',
                                  'r') as state_file:
                            state = json.load(state_file)
                        lin_hist = state['linear-history']
                        start = state['start_time']
                        disk_us = popen('du -hs ' +
                                        Point_dir).read().split()[0]
                    else:
                        # unknown config: no folder to inspect
                        lin_hist = '-'
                        start = '-'
                        disk_us = '-'
                    print(str(lin_hist).ljust(9), start[-8:].ljust(10),
                          disk_us.rjust(8))
            # prune the persisted stop-list of points no longer running
            points_run = [x[0] for x in points_run_list if x[1] == config]
            l_aux = []
            for Point in points_stopped:
                if Point in points_run:
                    l_aux += [Point]
            points_stopped = l_aux
        else:
            points_stopped = []
        if config != '-':
            with open(config_dir(config) + '/pstop.pickle',
                      'wb') as stop_file:
                pickle.dump(points_stopped, stop_file)

    if len(ps_out) > 1 and not empty:
        clock = datetime.fromtimestamp(time()).strftime('%H:%M:%S')
        print('\n [CLOCK: ' + clock + ']')
def refit_compute(args):
    """Worker: refit the torelon/profile correlation decays for one point.

    `args` is a packed tuple (see the unpacking below) so the function can
    be mapped over a pool of workers.

    Returns
    -------
    str or None
        'continue' to skip to the next sim, 'return' to abort the caller's
        loop, None on normal completion.
    """
    from os import chdir
    from os.path import isfile
    from time import time
    from datetime import datetime
    import json
    import numpy as np
    import matplotlib.pyplot as plt
    from lib.utils import point_dir, authorization_request
    from lib.analysis.fit import fit_decay2

    Point, points_configs, c_dir, i, force, plot, exclude_torelons = args
    if points_configs:
        c_dir = points_configs[Point]
    p_dir = c_dir + '/' + point_dir(Point)
    chdir(p_dir)

    # sims that hit the volume cap are not refitted
    if isfile(p_dir + '/max_volume_reached'):
        print(f'\033[38;5;41m(λ, β) = {Point}\033[0m skipped because '
              '\033[38;5;80mmax_volume_reached\033[0m is present.')
        # print(f"\033[38;5;80m config: '{config}'\033[0m")
        return 'continue'

    try:
        with open('measures.json', 'r') as file:
            measures = json.load(file)
    except FileNotFoundError:
        print(f'\033[1mCRITICAL:\033[0m no measures.json file in sim'
              f'\033[38;5;41m(λ, β) = {Point}\033[0m')
        return 'continue'

    if not force:
        what = 'to refit correlation lengths'
        auth = authorization_request(what_to_do=what, Point=Point)
    else:
        auth = 'yes'
        # print("☙ \033[38;5;41m(λ, β) = " + str(Point) + "\033[0m")

    if auth == 'yes':
        # --- torelons decay fit ---
        try:
            t_mean, t_std = measures['torelon-decay']
            torelons_decay_mean = np.array(t_mean)
            torelons_decay_std = np.array(t_std)
            print('\nTORELONS:')
            p_fit, par, cov, χ2 = fit_decay2(torelons_decay_mean,
                                             torelons_decay_std)
            if all([x is not None for x in [p_fit, par]]):
                x = np.linspace(0, len(torelons_decay_mean) - 1, 1001)
                y = p_fit
                plt.plot(x, y, 'tab:green', label='fit')
                plt.plot(torelons_decay_mean, 'tab:blue',
                         label='bootstrap mean')
                plt.plot(torelons_decay_mean + torelons_decay_std, 'tab:red')
                plt.plot(torelons_decay_mean - torelons_decay_std, 'tab:red',
                         label='bootstrap std')
                plt.title('TORELON:\n ')
                # f'Number of points: {len(indices_cut)}')
                plt.legend()
                plt.savefig('torelon.pdf')
                if plot and not force:
                    plt.show()
            # NOTE(review): assumes fit_decay2 always returns a 2-element χ2,
            # even on a failed fit — confirm against lib.analysis.fit
            torelons_fit = {'par': None if par is None else par.tolist(),
                            'cov': None if cov is None else cov.tolist(),
                            'chi2': χ2[0],
                            'dof': χ2[1]}
        except KeyError:
            # no 'torelon-decay' entry in measures.json
            torelons_fit = None

        plt.clf()  # reset the figure before the profiles plot

        # --- profiles correlation fit ---
        try:
            p_mean, p_std = measures['profiles_corr']
            profiles_corr_mean = np.array(p_mean)
            profiles_corr_std = np.array(p_std)
            print('\nPROFILES:')
            p_fit, par, cov, χ2 = fit_decay2(profiles_corr_mean,
                                             profiles_corr_std)
            if all([x is not None for x in [p_fit, par]]):
                # if False and par is not None:
                x = np.linspace(0, len(profiles_corr_mean) - 1, 1001)
                y = p_fit
                plt.plot(x, y, 'tab:green', label='fit')
                plt.plot(profiles_corr_mean, 'tab:blue',
                         label='bootstrap mean')
                plt.plot(profiles_corr_mean + profiles_corr_std, 'tab:red')
                plt.plot(profiles_corr_mean - profiles_corr_std, 'tab:red',
                         label='bootstrap std')
                plt.title('PROFILE CORR.:\n ')
                # f'Number of points: {len(indices_cut)}')
                plt.legend()
                plt.savefig('profile.pdf')
                if plot and not force:
                    plt.show()
            profiles_fit = {'par': None if par is None else par.tolist(),
                            'cov': None if cov is None else cov.tolist(),
                            'chi2': χ2[0],
                            'dof': χ2[1]}
        except KeyError:
            # no 'profiles_corr' entry in measures.json
            profiles_fit = None

        # Save results (only fully determined fits are persisted)
        if torelons_fit and None not in torelons_fit.values():
            measures['torelon-decay-fit2'] = torelons_fit
        if profiles_fit and None not in profiles_fit.values():
            measures['profiles-corr-fit2'] = profiles_fit
        measures['time2'] = datetime.fromtimestamp(
            time()).strftime('%d-%m-%Y %H:%M:%S')
        with open('measures.json', 'w') as file:
            json.dump(measures, file, indent=4)
    elif auth == 'quit' or auth == 'eof':
        print('Observables have not been recomputed.')
        return 'return'
    else:
        print('Observables have not been recomputed.')
def sim_obs(args):
    """Worker: select cut/block and compute observables for one point.

    BUG FIX: the previous signature `sim_obs(points, config, plot, fit,
    exclude_torelons, exclude_bootstrap, fit_name, force)` did not match
    the body, which unpacked an undefined name `args` and therefore raised
    NameError on every call. The function now takes the packed tuple
    directly, consistently with `refit_compute`.

    Parameters
    ----------
    args : tuple
        (Point, points_configs, c_dir, i, force, plot, fit,
        exclude_torelons, exclude_bootstrap).

    Returns
    -------
    str or None
        'continue' to skip to the next sim, 'return' to abort the caller's
        loop, None on normal completion.
    """
    from os import chdir
    from os.path import isfile, basename, dirname, realpath
    from time import time
    from datetime import datetime
    import json
    from pprint import pprint
    from lib.utils import (config_dir, point_dir, dir_point, fit_dir,
                           authorization_request, eng_not)
    from lib.analysis.fit import (set_cut, set_block, eval_volume,
                                  eval_top_susc, eval_action,
                                  eval_action_density, compute_torelons,
                                  compute_profiles_corr)

    (Point, points_configs, c_dir, i, force, plot, fit, exclude_torelons,
     exclude_bootstrap) = args

    if points_configs:
        c_dir = points_configs[Point]
    p_dir = c_dir + '/' + point_dir(Point)
    chdir(p_dir)
    vol = None

    # sims that hit the volume cap are skipped entirely
    if isfile(p_dir + '/max_volume_reached'):
        print(f'\033[38;5;41m(λ, β) = {Point}\033[0m skipped because '
              '\033[38;5;80mmax_volume_reached\033[0m is present.')
        # print(f"\033[38;5;80m config: '{config}'\033[0m")
        return 'continue'

    try:
        with open('state.json', 'r') as file:
            state = json.load(file)
    except FileNotFoundError:
        print(f'\033[1mCRITICAL:\033[0m no state.json file in sim'
              f'\033[38;5;41m(λ, β) = {Point}\033[0m')
        return 'continue'

    try:
        with open('measures.json', 'r') as file:
            measures = json.load(file)
        cb_exist = 'cut' in measures.keys() and 'block' in measures.keys()
    except FileNotFoundError:
        measures = {}
        cb_exist = False

    # --- cut & block selection ---
    if not force:
        what = 'to select cut & block'
        extra = ('\033[92m(existing value present for both)\033[0m'
                 if cb_exist else None)
        auth = authorization_request(what_to_do=what, Point=Point,
                                     extra_message=extra)
    else:
        print("\033[38;5;41m(λ, β) = " + str(Point) + "\033[0m ")
        auth = 'yes'

    if auth == 'quit' or auth == 'eof':
        print('Nothing done on the last sim.')
        return 'return'
    elif auth == 'yes':
        try:
            # the cut stored by the linear-history mechanism takes priority
            measures['cut'] = state['linear-history-cut']
            cut = state['linear-history-cut']
            with open('measures.json', 'w') as file:
                json.dump(measures, file, indent=4)
            print("\033[38;5;80m'linear-history-cut'\033[0m "
                  "has been used as cut")
        except KeyError:
            cut = set_cut(p_dir, i, force)
            if cut:
                measures['cut'] = cut
                with open('measures.json', 'w') as file:
                    json.dump(measures, file, indent=4)
        # fall back to a previously stored value
        try:
            cut = measures['cut']
        except KeyError:
            pass
        if cut:
            print(f'cut = {eng_not(cut)} ({cut})', end=' ')
        block = set_block(p_dir, i, force)
        if block:
            measures['block'] = block
            with open('measures.json', 'w') as file:
                json.dump(measures, file, indent=4)
        try:
            block = measures['block']
        except KeyError:
            pass
        if block:
            print(f'block = {eng_not(block)} ({block})', end=' ')
        if not cut or not block:
            print('\nNothing modified on last sim.')
            return 'return'
        vol = eval_volume(p_dir)

    # --- observables ---
    if not force:
        what = 'to compute/recompute observables'
        auth = authorization_request(what_to_do=what)
    else:
        auth = 'yes'
        # print("☙ \033[38;5;41m(λ, β) = " + str(Point) + "\033[0m")

    if auth == 'yes':
        try:
            with open('measures.json', 'r') as file:
                measures = json.load(file)
        except FileNotFoundError:
            measures = {}
        if force:
            # in batch mode cut/block must already be present
            if not 'cut' in measures.keys():
                print('cut not set')
                return 'continue'
            if not 'block' in measures.keys():
                print('block not set')
                return 'continue'
        measures['volume'] = vol if vol else eval_volume(p_dir)
        # measures['action'] = eval_action(p_dir)
        # measures['action-density'] = eval_action_density(p_dir)
        # measures['top-susc'] = eval_top_susc(p_dir, force=force)
        # if not exclude_torelons and not exclude_bootstrap:
        #     torelons_output = compute_torelons(p_dir, plot, fit, force=force)
        #     if torelons_output:
        #         measures['torelon-decay'] = torelons_output[:2]
        #         if None not in torelons_output[2].values():
        #             measures['torelon-decay-fit'] = torelons_output[2]
        if not exclude_bootstrap:
            profiles_output = compute_profiles_corr(p_dir, plot, fit,
                                                    force=force)
            measures['profiles_corr'] = profiles_output[:2]
            if None not in profiles_output[2].values():
                measures['profiles_corr_fit'] = profiles_output[2]
        measures['time'] = datetime.fromtimestamp(
            time()).strftime('%d-%m-%Y %H:%M:%S')
        with open('measures.json', 'w') as file:
            json.dump(measures, file, indent=4)
    elif auth == 'quit' or auth == 'eof':
        print('Observables have not been recomputed.')
        return 'return'
    else:
        print('Observables have not been recomputed.')