def load_cfd_data(name, ext=".npy"):
    """Load a CFD data array stored as ``name + ext``.

    Locates the file with find_dir(); when no ``.npy`` exists, retries once
    with the compressed ``.npz`` variant before giving up.

    Args:
        name: base file name without extension.
        ext: ".npy" (raw array) or ".npz" (archive; the 'arr_0' entry is used).

    Returns:
        The loaded numpy array.

    Raises:
        FileNotFoundError: when neither variant of the file can be found.
    """
    data_file = name + ext
    data_loc = find_dir(data_file)
    print(f"load_cfd_data: file={data_file}", end="\r")
    if not data_loc:
        if ext == ".npy":
            # Fall back to the compressed variant.
            return load_cfd_data(name, ext=".npz")
        raise FileNotFoundError(
            f"load_cfd_data: ファイルが見つかりません ({data_file})")
    with chdir(data_loc):
        # Re-check existence: the file may have vanished between find_dir()
        # and now. The original left `data` unbound on this path, which
        # surfaced as an UnboundLocalError instead of a clear error.
        if not os.path.isfile(data_file):
            raise FileNotFoundError(
                f"load_cfd_data: ファイルが見つかりません ({data_file})")
        if ext == ".npy":
            data = np.load(data_file)
        else:
            with np.load(data_file) as npf:
                data = npf['arr_0']
    print(" " * 80, end="\r")  # wipe the progress line
    return data
def rewrite_resume(resume):
    """Patch 'in2d.txt' inside *resume* so the run continues from its last
    saved step, then copy the inputs one directory up.

    Returns False when the directory holds no *.plt output; True on success.

    Raises:
        FileNotFoundError: when *resume* is not a directory.
    """
    if not os.path.isdir(resume):
        raise FileNotFoundError(resume)
    with ut.chdir(resume):
        plts = glob.glob('*.plt')
        if not plts:
            return False
        # Highest step number found among the .plt outputs.
        # NOTE(review): re.search(...)[0] raises TypeError on a .plt file
        # name with no digits — presumably all outputs are numbered; confirm.
        last = max(map(int, (re.search(r'\d+', f)[0] for f in plts)))
        with open('in2d.txt', 'r', encoding='utf-8') as f:
            lines = f.readlines()
        # Output interval: first integer on line 31 of the fixed-format file.
        iout = int(re.findall(r'\d+', lines[30])[0])
        with open('in2d.txt', 'w', encoding='utf-8') as f:
            for i, l in enumerate(lines):
                if i == 15:
                    # Split the fixed-width line at each space->non-space
                    # boundary, patch one field, and re-join.
                    ll = re.split(r'(?<= )(?=\S)', l)
                    # Field 1: total step count extended past the last save.
                    ll[1] = f'{(last+1)*iout:<9} '
                    l = ''.join(ll)
                elif i == 27:
                    ll = re.split(r'(?<= )(?=\S)', l)
                    # Field 4: index of the restart snapshot.
                    ll[4] = f'{last+1}\n'
                    l = ''.join(ll)
                f.write(l)
        # Propagate the patched inputs to the parent directory.
        for file in ('in2d.txt', 'grid.csv', 'grid.png'):
            shutil.copy(file, '../')
        return True
def plot_loss_ex(trainer):
    """Plot train/validation loss curves from log.json under trainer.out.

    Saves 'loss1.png' in that directory; returns silently when no log
    exists. The figure is always closed and a 10 s pause follows (the
    caller apparently polls this in a loop).
    """
    fig, ax = plt.subplots()
    ax.spines['top'].set_visible(False)
    ax.spines['right'].set_visible(False)
    lo = float('inf')
    hi = 0
    try:
        with ut.chdir(trainer.out):
            if not os.path.isfile('log.json'):
                return
            log = ut.load('log.json', from_json=True)
            for key in ('main/loss', 'val/main/loss'):
                curve = np.clip(np.array([entry[key] for entry in log]), 0, 1e6)
                ax.plot(curve, label=key)
                # Bound the y-range by an early-epoch window (skipping the
                # first few spiky values) and the global minimum.
                window = curve[min(len(curve) - 1, 3):50]
                hi = max(np.ceil(np.max(window)) / 1000 * 1000, hi)
                lo = min(np.min(curve) // 1000 * 1000, lo)
            ax.set_ylim((lo, hi))
            ax.set_xlabel('epoch')
            ax.grid(True)
            fig.legend()
            fig.savefig('loss1.png')
    finally:
        plt.close(fig)
        sleep(10)
def read_cfd_data(path, begin=0, size=None, step=1):
    """Read a range of out_*.plt files under *path* into one stacked array.

    呼び出し: read_plt

    Args:
        path: directory containing ``out_*.plt`` files.
        begin: index of the first file to read.
        size: number of files to cover (None = through the end).
        step: stride between files.

    Returns:
        np.ndarray stacking read_plt() results along a new leading axis.
    """
    # Bug fixes vs. original:
    #   * np.fromiter(gen) was called without the mandatory dtype argument
    #     (TypeError), and fromiter cannot stack array items anyway.
    #   * a slice stop of -1 silently dropped the last file when size=None.
    stop = begin + size if size else None
    with chdir(path):
        # NOTE(review): glob order is filesystem-dependent and unsorted here,
        # matching the original behavior — confirm callers don't need sorting.
        files = glob.glob("out_*.plt")[begin:stop:step]
        return np.array([read_plt(name) for name in files])
def save_cfd_data_all():
    """Pack naca0012 CFD results into .npz archives (v2). Call directly.

    (naca0012のデータを書き込み (v2) *直接呼ぶ)

    Each case directory is split into two chunks of 1000 steps; chunks that
    already have an archive on disk are skipped.
    """
    print("save_cfd_data_all: start")
    cfd_dir = "/path/to/CFD"
    cases = [
        "naca0012_t102_d1_re104_a0",
        "naca0012_t102_d1_re104_a4",
        "naca0012_t102_d1_re104_a8",
        "naca0012_t102_d1_re104_a12",
    ]
    file_fmt = "out_%04d.plt"
    size = 1000
    step = 1
    for case in cases:
        for begin in (0, 1000):
            datafile = f"{case}_{begin:0>4}-{begin+size-1:0>4}of{2000:0>4}.npz"
            if os.path.isfile(datafile):
                continue  # chunk already packed
            with chdir(f"{cfd_dir}/{case}"):
                frames = [read_plt(file_fmt % i)
                          for i in range(begin, begin + size, step)]
            np.savez_compressed(datafile, frames)
    print("save_cfd_data_all: done")
def mk_v(dest, file='out.mp4'):
    """Encode the out_%04d.png frame sequence under dest/nm into an mp4.

    No-op when FFMPEG is unset. NOTE(review): `nm` is read from module
    scope (the image subdirectory name) — confirm where it is defined.
    """
    if not FFMPEG:
        return
    cmd = [
        FFMPEG, '-framerate', '30', '-y', '-i', 'out_%04d.png',
        '-vcodec', 'libx264', '-pix_fmt', 'yuv420p', '-r', '30', file,
    ]
    with ut.chdir(os.path.join(dest, nm)):
        subprocess.call(cmd)
def load_grid_naca0012(key):
    """Load the 480x960 NACA0012 grid CSV for *key* (e.g. key="a0").

    The CSV lives next to this module and is read as uint8 flags.
    """
    module_dir = os.path.dirname(__file__)
    csv_name = f"grid_naca0012_480x960_{key}.csv"
    with chdir(module_dir):
        grid = np.loadtxt(csv_name, delimiter=",", dtype=np.uint8)
    return grid
def load_grid(name, dtype=np.float32):
    """Locate ``name + ".csv"`` via find_dir() and load it with np.loadtxt.

    Bug fix: the `dtype` parameter was silently ignored (np.float32 was
    hard-coded in the loadtxt call). It is now honored; the default was
    changed from np.uint8 to np.float32 so that callers using the default
    keep getting exactly what the original code produced.

    Args:
        name: base file name without the ".csv" extension.
        dtype: numpy dtype for the loaded array (default np.float32).

    Returns:
        np.ndarray parsed from the CSV.

    Raises:
        FileNotFoundError: when the CSV cannot be located.
    """
    data_file = name + ".csv"
    data_loc = find_dir(data_file)
    if not data_loc:
        raise FileNotFoundError(f"load_grid: ファイルが見つかりません ({data_file})")
    with chdir(data_loc):
        return np.loadtxt(data_file, delimiter=",", dtype=dtype)
def load_grid_manual():
    """Load the grid CSV for one hard-coded case (key "a4").

    (格子のデータを読み込み)
    """
    key = "a4"
    grid_loc = f'/path/to/naca0012_t102_d1_re104_{key}'
    with chdir(grid_loc):
        return np.loadtxt("grid.csv", delimiter=",", dtype=np.uint8)
def save_cfd_data_all():
    """Pack every case's u/v/p/c fields into 100-step .npz chunks. Call directly.

    (naca0012のデータを書き込み *直接呼ぶ)

    Scans cfd_dir for directories containing out_*.plt, rounds the file
    count down to a multiple of 100, and archives each 100-step chunk
    unless its .npz already exists.
    """
    def has_plt_output(d):
        # A case directory is any directory holding out_*.plt files.
        return os.path.isdir(d) and glob.glob(f"{d}/out_*.plt")

    def extract(x, y, u, v, p, f, c):
        # Keep only velocity, pressure and the c field.
        return [u, v, p, c]

    def plt_name(i):
        return "out_%04d.plt" % i

    cfd_dir = "/path/to/cfd"
    size = 100
    step = 1
    print("save_cfd_data_all: start")
    with chdir(cfd_dir):
        for case in filter(has_plt_output, glob.glob("*")):
            n_files = len(glob.glob(f"{case}/out_*.plt"))
            datasize = n_files // 100 * 100
            print(f"dir: {case} data: {datasize}")
            for begin in range(0, datasize, size):
                with chdir(case):
                    # Shape tag (e.g. "480x960") taken from the first frame.
                    shape = "{0}x{1}".format(
                        *read_plt(plt_name(0), extract).shape)
                    datafile = f"{case}_uvpc_{shape}_{begin:0>4}-{begin+size:0>4}of{datasize:0>4}.npz"
                    if os.path.isfile(datafile):
                        continue
                    chunk = [read_plt(plt_name(i), extract)
                             for i in range(begin, begin + size, step)]
                    np.savez_compressed(datafile, chunk)
    print("save_cfd_data_all: done")
def remove_samll_dirs():
    """Delete small, snapshot-less result directories; thin the rest.

    A directory under ``**/__result__/`` is removed when it contains no
    ``snapshot_*`` file and occupies under 100 MB; otherwise its snapshots
    are thinned via thin_snapshot(). Prints the total space freed.

    NOTE(review): the name keeps the historical typo ("samll") so existing
    callers keep working.
    """
    with ut.chdir(SRC_DIR):
        dirs = ut.globm('**/__result__/*')
        size = 0
        for d in dirs:
            dirname = os.path.abspath(d)
            dirsize = ut.filesize(dirname)
            snapshots = [f for f in os.listdir(dirname)
                         if f.startswith('snapshot_')]
            print(d, f'snapshots={len(snapshots)}', end='')
            # Reuse dirsize here: the original re-walked the same directory
            # with a second ut.filesize(d) call just for this comparison.
            if len(snapshots) < 1 and dirsize < 100 * ut.MB1:
                print(' => Remove')
                shutil.rmtree(dirname)
                size += dirsize
            else:
                print('\r')
                with ut.chdir(dirname):
                    size += thin_snapshot(snapshots, d)
        print(f'free: {size/1048576:.2f}MB')
def main1():
    """Plot the time gaps between mtimes of the first 1000 raw .plt files
    under ./test, listing each file name on stdout."""
    with ut.chdir('test'):
        entries = sorted(
            (os.path.getmtime(f), f)
            for f in ut.iglobm('__raw__/*.plt')
            if int(re.search(r'\d+', f)[0]) < 1000)
        # Consecutive mtime differences, in file order.
        deltas = np.array([b[0] - a[0]
                           for a, b in zip(entries[:-1], entries[1:])])
        for _, name in entries:
            print(name)
        plt.plot(deltas)
        plt.xlim(0, len(deltas) - 1)
        # plt.ylim(0, 1000)
        plt.show()
def comp_main():
    """Repack per-step ``out_NNNN.plt.npy`` arrays into ten ``out_XXX.npz``
    bundles (~100 steps each) per ``<dir>/result`` directory, deleting each
    source .npy only after it is confirmed present in the archive.
    """
    def read(files):
        # Yield (array name, array) pairs for savez_compressed(**dict).
        for file in files:
            name = re.search(r'out_\d+', file)[0]
            print(name, end=' \r')
            yield name, np.load(file)
    for d in ut.fsort(os.listdir('.')):
        loc = f'{d}/result'
        if not os.path.isdir(loc):
            continue
        try:
            with ut.chdir(loc):
                # if any(ut.iglobm('*.npz')):
                #     continue
                if any(ut.iglobm('*.npy')):
                    print(loc)
                # Ten fixed chunks; the last covers steps 900..1000 inclusive.
                for i in range(10):
                    out = f'out_{i:03d}.npz'
                    if os.path.isfile(out):
                        continue
                    l = i * 100
                    u = (i + 1) * 100 if i < 9 else 1001
                    files = [f'out_{j:04d}.plt.npy' for j in range(l, u)]
                    # Only pack complete chunks.
                    if not all(map(os.path.isfile, files)):
                        continue
                    print(f'{l}=>{u}', ' ' * 20, end='\r')
                    h = dict(read(files))
                    try:
                        np.savez_compressed(out, **h)
                    except:
                        # Don't leave a truncated archive behind; re-raise.
                        if os.path.isfile(out):
                            os.remove(out)
                        raise
                    # Verify the archive, then drop the source .npy files
                    # that are actually stored in it.
                    ck = np.load(out)
                    for nm in ck.files:
                        f = f'{nm}.plt.npy'
                        if os.path.isfile(f):
                            os.remove(f)
        except OSError:
            # Transient filesystem trouble (e.g. network share) — back off
            # and move on to the next directory.
            print('OSError')
            time.sleep(10)
            continue
def find_dir(file, mode="file"):
    """Search the known data locations for a directory containing *file*.

    Recursively walks every subdirectory of 'data' and of this module's
    directory; returns the first directory in which *file* exists (as a
    file when mode == "file", as a directory otherwise), else None.
    """
    check = os.path.isfile if mode == "file" else os.path.isdir
    roots = ['data', os.path.dirname(__file__)]
    for root in roots:
        for candidate in glob.iglob(root + "/**", recursive=True):
            if not os.path.isdir(candidate):
                continue
            with chdir(candidate):
                if check(file):
                    return candidate
    return None
def pack_data():
    """Extract the vorticity channel (index 4) from every 10th of the first
    1000 result frames and save them as 'vorticity_100.npy'."""
    odir = 'result'
    with post_base():
        with ut.chdir(odir):
            files = sorted(glob.iglob('out_*.npy'))
            # Channel 4 of each frame => vorticity field.
            data = [np.load(name)[:, :, 4] for name in files[0:1000:10]]
            ofile = 'vorticity_100.npy'
            print(ofile)
            np.save(ofile, data)
def load_cfd_data(key=None):
    """Load the packed naca0012 archive next to this module.

    (naca0012のデータを読み込み)

    Returns the entry for *key* when given, the whole NpzFile otherwise,
    or None when the archive is missing.
    """
    data_file = 'naca0012_t102_d1_re104_step100.npz'
    with chdir(os.path.dirname(__file__)):
        if not os.path.isfile(data_file):
            print(f"load_cfd_data: file not found ({data_file})")
            return None
        data = np.load(data_file)
    print("load_cfd_data: done")
    return data[key] if key else data
def get_result_file(root='.'):
    """Interactively pick a saved optimization pickle under *root* and load it.

    (計算済み最適化ファイルを読み出す)

    Lists optimize_*.pkl, reads an index from stdin, and returns the
    (env, optimizer, history) tuple stored in the chosen file. Entering a
    negative index aborts and returns None.
    """
    with ut.chdir(root):
        candidates = glob.glob('optimize_*.pkl')
        for idx, name in enumerate(candidates):
            print(f'[{idx}] {name}')
        print('select file')
        choice = int(input())
        if choice < 0:
            return
        chosen = candidates[choice]
        print('file:', chosen)
        env, optimizer, history = ut.load(chosen)
        return env, optimizer, history
def main():
    """CLI entry point: --comp compresses results, --zip zips images
    inside the 'archive' directory."""
    ap = argparse.ArgumentParser()
    ap.add_argument('--comp', '-c', action='store_true',
                    help='compress results')
    ap.add_argument('--zip', '-z', action='store_true',
                    help='zip images')
    opts = ap.parse_args()
    if opts.comp:
        chop_main()
    if opts.zip:
        with ut.chdir('archive'):
            zip_main()
def plot(dest, force=False):
    """Render one field of every result/*.dat file under *dest* to PNGs in
    the ``nm`` subdirectory; skip images that already exist unless *force*.

    Returns True when at least one new image was written.

    NOTE(review): ``nm`` (output subdir), ``n`` (channel index) and ``vr``
    (color range) are read from module scope — confirm where they are set.
    """
    flag = False
    with ut.chdir(dest):
        ut.mkdir(nm)
        # files = ut.iglobm('result/*.npy')
        files = ut.iglobm('result/*.dat')
        for file in files:
            basename = ut.basename(file, '.*')
            image = f'{nm}/{basename}.png'
            if os.path.isfile(image) and not force:
                continue
            print(dest, basename, end=' \r')
            # data = np.load(file)
            data = cm.read_raw(file)
            # print(data.shape)
            val = data[:, :, n]  # ((u, v, p, w), ...)
            if n == 2:
                # Pressure channel: plot the deviation from its mean.
                # a = val[val.shape[0]//2, val.shape[1]//4]
                val -= val.mean()
            fig, ax = plt.subplots(figsize=(6, 4))
            fig.subplots_adjust(left=0.08, right=1, bottom=0, top=1)
            # Diverging red-black-green map, symmetric about zero.
            colors = [(0, '#ff0000'), (0.5, '#000000'), (1, '#00ff00')]
            cmap = plc.LinearSegmentedColormap.from_list('custom_cmap', colors)
            im = ax.imshow(val, cmap=cmap, vmin=-vr, vmax=vr)
            cax = fig.colorbar(im)
            # Stretch the colorbar to exactly match the image axes' height.
            ax_pos = ax.get_position()
            cax_pos0 = cax.ax.get_position()
            cax_pos1 = [
                cax_pos0.x0, ax_pos.y0, cax_pos0.x1 - cax_pos0.x0,
                ax_pos.y1 - ax_pos.y0
            ]
            cax.ax.set_position(cax_pos1)
            # plt.show()
            fig.savefig(image, bbox_inches='tight', pad_inches=0.1)
            plt.close('all')
            flag = True
    return flag
def zip_main():
    """Zip the PNG frames of every ``**/image_*`` directory into
    ``<dirname>.zip`` inside that directory, then delete only the PNGs
    verified to be inside the archive."""
    for loc in ut.fsort(ut.iglobm('**/image_*')):
        # loc = f'{d}/image_w'
        if not os.path.isdir(loc):
            continue
        with ut.chdir(loc):
            file = f'{ut.basename(loc)}.zip'
            if os.path.isfile(file):
                continue  # already archived
            print(loc)
            # subprocess.run('zip image_w *.png', shell=True)
            try:
                def f_():
                    # Yield PNG names, opening each one first so unreadable
                    # files raise here rather than mid-archive.
                    for png in ut.fsort(ut.iglobm('*.png')):
                        print(png, os.path.getsize(png), end='\r')
                        with open(png, 'rb'):
                            yield png
                pngs = list(f_())
                if not pngs:
                    continue
                with ZipFile(file, 'w', compression=ZIP_DEFLATED) as z:
                    for png in pngs:
                        z.write(png)
            except:
                # Never leave a partial zip behind; re-raise the error.
                if os.path.isfile(file):
                    os.remove(file)
                raise
            assert os.path.isfile(file)
            if os.path.isfile(file):
                # Remove only sources that made it into the archive.
                with ZipFile(file) as z:
                    for png in z.namelist():
                        if os.path.isfile(png):
                            print(png)
                            os.remove(png)
def save_cfd_data():
    """Pack every angle-of-attack case (every 100th step) into one .npz.

    (naca0012のデータを書き込み / 呼び出し: read_cfd_data)

    Scans cfd_dir for directories whose name contains ``_a<digits>`` and
    stores each case's data under that "aNN" key.
    """
    print("save_cfd_data: start")
    cfd_dir = "/path/to/CFD"
    datafile = 'naca0012_t102_d1_re104_step100.npz'
    with chdir(cfd_dir):
        data = {}
        for d in glob.glob("*"):
            if not os.path.isdir(d):
                continue
            # Angle-of-attack tag, e.g. "a4" out of "..._re104_a4".
            m = re.search(r"(?<=_)a\d+", d)
            alpha = m and m.group()
            if not alpha:
                continue
            print(d, alpha)
            data[alpha] = read_cfd_data(d, step=100)
        print(data)
        np.savez_compressed(datafile, **data)
    print("save_cfd_data: done")
def save_cfd_data():
    """Pack steps 1000-1999 of one naca0012 case into a .npz archive (v2).

    (naca0012のデータを書き込み (v2) *直接呼ぶ)

    NOTE(review): this shadows an earlier ``save_cfd_data`` in this file —
    the module appears to be a concatenation of scripts; confirm which
    definition callers expect.
    """
    print("save_cfd_data: start")
    cfd_dir = "/path/to/CFD"
    data_dir = "naca0012_t102_d1_re104_a4"
    file_fmt = "out_%04d.plt"
    begin = 1000
    size = 1000
    step = 1
    # Bug fixes vs. original:
    #   * "{begin: %04d}" is an invalid format spec and raised ValueError;
    #     use the same zero-padded spec as save_cfd_data_all for consistency.
    #   * savez_compressed was handed an undefined name `f()` instead of
    #     the collected data list.
    datafile = f"{data_dir}_{begin:0>4}-{begin+size-1:0>4}of{2000:0>4}.npz"
    with chdir(f"{cfd_dir}/{data_dir}"):
        data = [
            read_plt(file_fmt % i)
            for i in range(begin, begin + size, step)
        ]
    np.savez_compressed(datafile, data)
    print("save_cfd_data: done")
def main(opts=None):
    """Run the 2-D CFD time-stepping loop configured by 'in2d.txt'.

    Parses CLI options (from *opts* when given, else sys.argv), allocates
    the field arrays, optionally restores a resume state, then advances the
    solver for `cycle` steps inside the output directory, saving dumps and
    vorticity images every `save_interval` cycles.

    NOTE(review): the f_* routines are bound by register_functions() —
    presumably native (Fortran/C) kernels operating on these arrays in
    place; confirm at the definition site.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--cycle', '-n', type=int, default=None,
                        help='number of cycles')
    parser.add_argument('--out', '-o', type=str, default=None,
                        help='output directory name')
    parser.add_argument('--resume', '-r', type=str, default=None,
                        help='filename for resume calculation')
    args = parser.parse_args(args=opts)
    conf = read_inputfile('in2d.txt')
    # CLI options override the input file where given.
    if args.cycle:
        cycle = args.cycle
    else:
        cycle = conf.cycle
    if args.out:
        out = args.out
    else:
        out = conf.dest
    itr_max = conf.nitr          # max pressure iterations per cycle
    save_interval = conf.save    # cycles between dumps
    # Grid extents as 0-d int32 arrays (passed by reference to the kernels).
    nx = np.array(conf.nx, dtype=np.int32)
    ny = np.array(conf.ny, dtype=np.int32)
    u = np.zeros((2, ny+2, nx+1), dtype=np.float64)  # x-velocity (2 time levels)
    v = np.zeros((2, ny+1, nx+2), dtype=np.float64)  # y-velocity (2 time levels)
    p = np.zeros((ny+2, nx+2), dtype=np.float64)     # pressure
    t = np.zeros((ny+2, nx+2), dtype=np.float64)     # temperature
    f = np.ones((ny+2, nx+2), dtype=np.float64)      # domain flags (0 => on body, 1 => fluid)
    m = np.zeros((120,), dtype=np.uint8)             # message buffer filled by the kernels
    flg = np.array(0, dtype=np.int32)                # pressure-solve convergence flag
    itr_hist = []
    save_count = 0
    register_functions()
    f_read_inputfile()
    f_initialize()
    # Interior of the flag field comes from grid.csv (ghost cells stay 1).
    with open('grid.csv') as fp:
        f[1:-1, 1:-1] = np.array(list(csv.reader(fp)), dtype=np.float64)
    if args.resume:
        u[0], v[0], p[:] = load_value(args.resume)
    # Clear stale frames from a previous run.
    for file in ut.iglobm('image/*.png'):
        os.remove(file)
    with ut.stopwatch('calc'):
        with ut.chdir(out):
            with tqdm(total=cycle, mininterval=1) as bar:
                for i in range(1, cycle+1):
                    # Predict velocities, then enforce the body boundary.
                    f_calc_velociry(u[0], v[0], p, t, u[1], v[1], nx, ny)
                    f_bind_velocity(u[1], v[1], f, nx, ny)
                    # Pressure iteration until the kernel reports flg == 0.
                    for j in range(1, itr_max+1):
                        itr = np.array(j, dtype=np.int32)
                        m.fill(ord(' '))
                        f_calc_pressure(u[1], v[1], p, itr, flg, nx, ny, m)
                        f_bind_velocity(u[1], v[1], f, nx, ny)
                        if j % 100 == 0:
                            msg = ''.join(map(chr, m)).rstrip()
                            bar.write(f'cycle={i} {msg}')
                        if flg == 0:
                            if i % 100 == 0:
                                msg = ''.join(map(chr, m)).rstrip()
                                bar.write(f'cycle={i} {msg}')
                            break
                    # Iterations used this cycle (loop variable survives the loop).
                    itr_hist.append(j)
                    # Promote the new time level.
                    u[0] = u[1]
                    v[0] = v[1]
                    if i % save_interval == 0:
                        k = save_count
                        with ut.chdir('result'):
                            dump_data(f'out_{k:05d}.npy', u[0], v[0], p)
                        with ut.chdir('image'):
                            plot_w(vorticity(u[0], v[0]), f'out_{k:05d}.png')
                            plot_nitr(itr_hist, 'nitr.png')
                        save_count += 1
                    bar.update()
def post_base(dest=None):
    """Generator context body: run inside the output directory.

    When *dest* is falsy, it is taken from the 'dest' entry of 'in2d.txt'
    via mg.get_info(). NOTE(review): presumably wrapped by
    contextlib.contextmanager at the (not shown) decoration site — confirm.
    """
    if not dest:
        dest = mg.get_info('in2d.txt')['dest']
    with ut.chdir(dest):
        yield