def load_from_archive(names, arch):
    """Load result chests, their slict views, and param dicts for each run name.

    For every name this opens the "<name>-results" Chest via the archive
    endpoint and reads "<name>.json" for the run parameters.  When exactly
    one name is given, bare objects are returned instead of one-element
    lists, as a convenience for interactive use.
    """
    chests = []
    for run in names:
        chests.append(Chest(
            path="{:s}-results".format(run),
            open=partial(glopen, endpoint=arch),
            open_many=partial(glopen_many, endpoint=arch),
            available_memory=1e12,
        ))
    slicts = [CachedSlict(c) for c in chests]

    param_dicts = []
    for run in names:
        with glopen("{:s}.json".format(run), mode='r', endpoint=arch) as f:
            param_dicts.append(json.load(f))

    # Single-run convenience form: unwrap the one-element lists.
    if len(names) == 1:
        return chests[0], slicts[0], param_dicts[0]
    return chests, slicts, param_dicts
def test_CachedSlict_3d():
    """Slicing the middle axis of a 3-tuple-keyed CachedSlict yields a
    consistent 2-tuple-keyed view with sorted keys."""
    backing = {
        (2, 2, 2): 6,
        (2, 2, 3): 7,
        (1, 2, 8): 11,
        (8, 2, 1): 11,
        (2, 3, 3): 8,   # middle key != 2: excluded from the slice below
    }
    view = CachedSlict(backing)[:, 2, :]

    # Construction rule for the kept entries: value == sum(full key) + 2,
    # which reduces to sum(sliced key) + 2 once the fixed axis is dropped.
    for key in view:
        assert view[key] == sum(key) + 2

    # Keys iterate in strictly ascending (tuple) order.
    previous = (0, 0)
    for key in view.keys():
        assert key > previous
        previous = key

    # items() and values() agree with direct indexing.
    pairs = view.items()
    all_values = view.values()
    for key, value in pairs:
        assert value == view[key]
        assert value in all_values
def test_CachedSlict_2d():
    """Slicing the trailing axis of a 2-tuple-keyed CachedSlict yields a
    scalar-keyed view with sorted keys and consistent accessors."""
    backing = {
        (3, 2): 5,
        (2, 2): 4,
        (1, 2): 3,
        (8, 2): 10,
        (2, 3): 5,   # second key != 2: excluded from the slice below
    }
    view = CachedSlict(backing)[:, 2]

    # Construction rule for the kept entries: value == first key + 2.
    for key in view:
        assert view[key] == key + 2

    # Keys iterate in strictly ascending order.
    previous = 0
    for key in view.keys():
        assert key > previous
        previous = key

    # items() and values() agree with direct indexing.
    pairs = view.items()
    all_values = view.values()
    for key, value in pairs:
        assert value == view[key]
        assert value in all_values
def test_CachedSlict_1d():
    """A full slice of a scalar-keyed CachedSlict preserves values, sorts
    keys, and keeps items()/values() consistent with indexing.

    Fix: the original assigned ``d[2] = 2`` twice; the duplicate was dead
    code and has been removed (behavior is unchanged).
    """
    d = {}
    d[3] = 3
    d[2] = 2
    d[1] = 1
    d[8] = 8
    sd = CachedSlict(d)
    sd2 = sd[:]

    # Construction rule: value == key.
    for k in sd2:
        assert sd2[k] == k

    # Keys iterate in strictly ascending order.
    last = 0
    for k in sd2.keys():
        assert k > last
        last = k

    # items() and values() agree with direct indexing.
    items = sd2.items()
    vals = sd2.values()
    for k, v in items:
        assert v == sd2[k]
        assert v in vals
from json import loads

# Plot output settings.
img_format = 'eps'
title = False

parser = ArgumentParser(description="Plotter for smRTI data")
parser.add_argument("--traj", action="store_true", default=False)
parser.add_argument("--only", type=str, default=None)
# BUG FIX: a positional argument's default is only honored with nargs='?';
# without it argparse made "params" required and the default was dead code.
# Adding nargs='?' is backward-compatible (explicit invocations still work).
parser.add_argument("params", type=str, nargs='?', default="fit_results.p")
args = parser.parse_args()

# Load the pickled data table and wrap it for sliced access.
with open("data_table.p", 'rb') as f:
    data_table_d = pickle.load(f)
data_table = CachedSlict(data_table_d)

from model import exp_mix, mix_model
from model import exp_dyn, dyn_model
from model import both_error, full_model, filter_trajectory

# Fit results produced by the fitting driver (path selectable via "params").
with open(args.params, "rb") as f:
    results_in = pickle.load(f)

# Work list: either an explicit JSON-encoded selection, or every
# (viscosity, conductivity) pair present in the data table.
if args.only is not None:
    todo = loads(args.only)
else:
    todo = data_table[:, :, 'time'].keys()
# Trajectory plots are opt-in; without --traj nothing is processed.
if not args.traj:
    todo = []
import pickle
from os.path import exists

import numpy as np
import cma
from scipy.interpolate import UnivariateSpline
# FIX: "minimize" was imported twice (two separate scipy.optimize imports);
# deduplicated into a single import line.
from scipy.optimize import basinhopping, minimize

from slict import CachedSlict

# Load the pickled data table and wrap it for sliced access.
with open("data_table.p", 'rb') as f:
    data_table_d = pickle.load(f)
data_table = CachedSlict(data_table_d)

from model import filter_trajectory, error
from model import dyn_error, guess_dyn, bounds_dyn, exp_dyn, scaling_dyn, bounds_dyn_t
from model import mix_error, guess_mix, bounds_mix, exp_mix

# Resume from previous fit results when available; otherwise start fresh.
if exists("fit_results.p"):
    with open("fit_results.p", "rb") as f:
        results = pickle.load(f)
    with open("reg_fit_results.p", "rb") as f:
        reg_results = pickle.load(f)
else:
    results = {}
    # BUG FIX: reg_results was left undefined on this path, so any later
    # read of it on a fresh run would raise NameError.
    reg_results = {}

# Process (viscosity, conductivity) cases in reverse key order.
todo = list(data_table[:, :, 'time'].keys())
todo.reverse()
args = command_line_ui()

# Load run parameters from the genrun.py input dictionary.
import json
#from utils.custom_json import CustomDecoder
with open("{:s}.json".format(args.name), 'r') as f:
    params = json.load(f)

# insert new results into the dictionary
fname = '{:s}-results.dat'.format(args.name)
#with open(fname, 'r') as f:
#  results = json.load(f, cls=CustomDecoder)

# Open the on-disk results store as a sliceable view.
from chest import Chest
from slict import CachedSlict
results = CachedSlict(Chest(path="{:s}-results".format(args.name)))

# The post-processing module is chosen at runtime via --post.
from importlib import import_module
xx = import_module(args.post)

import time as clock
start_time = clock.time()
i = 0
# Per-frame processing, currently disabled:
#for time in results[:,"frame"].keys():
#  xx.plot_frame(results[time,:], params, args)
#  i = i + 1
#  print("Processed t={:f} ({:f} fps)".format(time, (clock.time() - start_time) / i))

# Post-post process the contents of the results dictionary
xx.post_series(results, params, args)
from os.path import join

# One working directory per override, configured against the base input.
workdirs = [join(getcwd(), o["name"]) for o in overrides]
configs = [configure(base, override, workdir)
           for override, workdir in zip(overrides, workdirs)]

data_table = {}
# Slice endpoint for the time series; -1 drops the final sample.
# NOTE(review): presumably the last frame may be incomplete — confirm intent.
max_index = -1
height = 'H_exp'

for cfg, wd in zip(configs, workdirs):
    path = join(wd, "{}-results".format(cfg['name']))
    print(path)
    if exists(path):
        sc = CachedSlict(Chest(path=path))
        # NOTE(review): assumes CachedSlict.keys() returns a sliceable
        # sequence (a plain dict view would not support [:max_index]).
        times = sc[:, height].keys()[:max_index]
        run_key = (cfg['viscosity'], cfg['conductivity'])
        data_table[run_key + ('time',)] = np.array(times)
        data_table[run_key + ('height',)] = np.array(
            [sc[t, height] for t in times])
        data_table[run_key + ('atwood',)] = np.array(
            [4 * np.mean(sc[t, 't_abs_proj_z']) for t in times])
        # Record every configuration parameter alongside the series.
        for param_name, param_value in cfg.items():
            data_table[run_key + (param_name,)] = param_value

import pickle
with open("data_table.p", "wb") as f:
    pickle.dump(data_table, f)
print(data_table)