def process_file(args):
    with open(args.config) as config_data:
        config = json.load(config_data)
    print(config)
    filename = args.instream
    print("Opening:", filename)
    if not args.nompi:
        fr = adios2.open(filename, "r", MPI.COMM_SELF, "adios2.xml",
                         "TAUProfileOutput")
    else:
        fr = adios2.open(filename, "r", "adios2.xml", "TAUProfileOutput")

    # Get the attributes (simple name/value pairs)
    attr_info = fr.available_attributes()
    # Get the unique host names from the attributes
    num_hosts = get_num_hosts(attr_info)

    cur_step = 0
    # Iterate over the steps
    for fr_step in fr:
        # track current step
        cur_step = fr_step.current_step()
        print(filename, "Step = ", cur_step)
        for f in config["figures"]:
            print(f["name"])
            if "Timer" in f["name"]:
                build_topX_timers_dataframe(fr_step, cur_step, f)
            elif f["granularity"] == "node":
                build_per_host_dataframe(fr_step, cur_step, num_hosts, f)
            else:
                build_per_rank_dataframe(fr_step, cur_step, f)

def test_single_step(self):
    # Create an mpl figure
    x = np.arange(0.0, 2, 0.01)
    y1 = np.sin(2 * np.pi * x)
    y2 = 1.2 * np.sin(4 * np.pi * x)
    fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex=True)
    ax1.fill_between(x, 0, y1)
    ax1.set_ylabel('between y1 and 0')
    ax2.fill_between(x, y1, 1)
    ax2.set_ylabel('between y1 and 1')
    ax3.fill_between(x, y1, y2)
    ax3.set_ylabel('between y1 and y2')
    ax3.set_xlabel('x')

    # # load some image data
    # img = Image.open("{}/images/simple-3x3-1.png".format(
    #     os.path.dirname(os.path.abspath(__file__)))).convert("RGB")
    # pngPixels = list(img.getdata())

    # test writing
    with adios2.open("test_mpl.bp", "w") as fh:
        plxr.write_png_image_from_matplotlib_hl(fh, fig, 'test_image')

    # test reading (read from the step handle, as in test_multiple_steps)
    with adios2.open("test_mpl.bp", "r") as fh:
        for ad_step in fh:
            rimg = plxr.read_image_hl(ad_step, 'test_image')
            readPixels = list(rimg.getdata())

def test_multiple_steps(self):
    # load some image data
    img1 = Image.open("{}/images/simple-3x3-1.png".format(
        os.path.dirname(os.path.abspath(__file__)))).convert("RGB")
    pngPixels1 = list(img1.getdata())
    img2 = Image.open("{}/images/simple-3x3-2.png".format(
        os.path.dirname(os.path.abspath(__file__)))).convert("RGB")
    pngPixels2 = list(img2.getdata())
    img3 = Image.open("{}/images/simple-3x3-3.png".format(
        os.path.dirname(os.path.abspath(__file__)))).convert("RGB")
    pngPixels3 = list(img3.getdata())
    img4 = Image.open("{}/images/simple-3x3-4.png".format(
        os.path.dirname(os.path.abspath(__file__)))).convert("RGB")
    pngPixels4 = list(img4.getdata())

    # test writing
    with adios2.open("test_multiple.bp", "w") as fh:
        plxr.write_png_image_hl(fh, img1, 'test_image', end_step=True)
        plxr.write_png_image_hl(fh, img2, 'test_image', end_step=True)
        plxr.write_png_image_hl(fh, img3, 'test_image', end_step=True)
        plxr.write_png_image_hl(fh, img4, 'test_image', end_step=True)

    # test reading
    readPixels = []
    with adios2.open("test_multiple.bp", "r") as fh:
        for ad_step in fh:
            rimg = plxr.read_image_hl(ad_step, 'test_image')
            readPixels.append(list(rimg.getdata()))

    # Compare pixels to original
    self.assertEqual(pngPixels1, readPixels[0])
    self.assertEqual(pngPixels2, readPixels[1])
    self.assertEqual(pngPixels3, readPixels[2])
    self.assertEqual(pngPixels4, readPixels[3])

def process_file(args):
    fontsize = 12
    filename = args.instream
    print("Opening:", filename)
    if not args.nompi:
        fr = adios2.open(filename, "r", MPI.COMM_SELF, "adios2.xml",
                         "TAUProfileOutput")
    else:
        fr = adios2.open(filename, "r", "adios2.xml", "TAUProfileOutput")
    initialize_globals()
    cur_step = 0
    for fr_step in fr:
        # track current step
        cur_step = fr_step.current_step()
        print(filename, "Step = ", cur_step)
        # inspect variables in current step
        vars_info = fr_step.available_variables()
        # dump_vars(vars_info)
        get_utilization(True, fr_step, vars_info, cpu_components,
                        previous_mean, previous_count, current_period,
                        period_values)
        get_utilization(False, fr_step, vars_info, mem_components,
                        previous_mean, previous_count, current_period,
                        period_values)
        get_utilization(False, fr_step, vars_info, io_components,
                        previous_mean, previous_count, current_period,
                        period_values)
        top5 = get_top5(fr_step, vars_info)
        x = range(0, cur_step + 1)
        plot_utilization(args, x, fontsize, cur_step, top5)

def CompressZfp2D(rate):
    fname = "BPWRZfp2D_" + str(rate) + "_py.bp"
    Nx = 100
    Ny = 50
    NSteps = 2

    # initialize values
    r32s = np.zeros([Ny, Nx], np.float32)
    r64s = np.zeros([Ny, Nx], np.float64)
    value_ji = 0.
    for j in range(0, Ny):
        for i in range(0, Nx):
            r32s[j][i] = value_ji
            r64s[j][i] = value_ji
            value_ji += 1.

    # set global dimensions
    # MPI
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()
    shape = [Ny * size, Nx]
    start = [Ny * rank, 0]
    count = [Ny, Nx]

    # writer
    with adios2.open(fname, "w", comm) as fw:
        for s in range(0, NSteps):
            fw.write("r32", r32s, shape, start, count,
                     [('zfp', {'accuracy': str(rate)})])
            fw.write("r64", r64s, shape, start, count,
                     [('zfp', {'accuracy': str(rate)})], end_step=True)

    # reader
    with adios2.open(fname, "r", comm) as fr:
        for fstep in fr:
            in_r32s = fstep.read("r32", start, count)
            in_r64s = fstep.read("r64", start, count)
            for j in range(0, Ny):
                for i in range(0, Nx):
                    assert (abs(r32s[j][i] - in_r32s[j][i]) < 1E-4)
                    assert (abs(r64s[j][i] - in_r64s[j][i]) < 1E-4)

def test_read_strings_all_steps(self):
    fileName = 'string_test_all.bp'
    with adios2.open(fileName, "w") as fh:
        for i in range(N_STEPS):
            fh.write("string_variable", "written {}".format(i))
            fh.end_step()

    with adios2.open(fileName, "r") as fh:
        n = fh.steps()
        name = "string_variable"
        result = fh.read_string(name, 0, n)
        expected_str = ["written {}".format(i) for i in range(n)]
        self.assertEqual(result, expected_str)

def process_file(args):
    with open(args.config) as config_data:
        config = json.load(config_data)

    # make the output directory
    if "SVG output directory" not in config or config["SVG output directory"] == ".":
        config["SVG output directory"] = os.getcwd()
    else:
        Path(config["SVG output directory"]).mkdir(parents=True, exist_ok=True)
    if "Timestep for filename" not in config:
        config["Timestep for filename"] = "default"
    for f in config["figures"]:
        if "SVG output directory" not in f or f["SVG output directory"] == ".":
            f["SVG output directory"] = config["SVG output directory"]
        else:
            Path(f["SVG output directory"]).mkdir(parents=True, exist_ok=True)
        if "Timestep for filename" not in f:
            f["Timestep for filename"] = config["Timestep for filename"]

    filename = args.instream
    print("Opening:", filename)
    if not args.nompi:
        fr = adios2.open(filename, "r", MPI.COMM_SELF, "adios2.xml",
                         "TAUProfileOutput")
    else:
        fr = adios2.open(filename, "r", config["ADIOS2 config file"],
                         "TAUProfileOutput")

    # Get the attributes (simple name/value pairs)
    attr_info = fr.available_attributes()
    # Get the unique host names from the attributes
    num_hosts = get_num_hosts(attr_info)

    cur_step = 0
    # Iterate over the steps
    for fr_step in fr:
        begin_time = time.time()
        # track current step
        cur_step = fr_step.current_step()
        print(filename, "Step = ", cur_step)
        for f in config["figures"]:
            print(f["name"])
            if "Timer" in f["name"]:
                build_topX_timers_dataframe(fr_step, cur_step, f)
            elif f["granularity"] == "node":
                valid_ranks = get_valid_ranks(attr_info)
                build_per_host_dataframe(fr_step, cur_step, num_hosts,
                                         valid_ranks, f)
            else:
                build_per_rank_dataframe(fr_step, cur_step, f)
        fr.end_step()
        total_time = time.time() - begin_time
        print(f"Processed step in {total_time} seconds", flush=True)

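# An illustrative config for process_file above, reconstructed from the
# keys the function reads ("figures", "name", "granularity",
# "SVG output directory", "Timestep for filename", "ADIOS2 config file").
# The values are placeholders, not taken from a real run.
EXAMPLE_CONFIG = {
    "ADIOS2 config file": "adios2.xml",
    "SVG output directory": ".",
    "Timestep for filename": "default",
    "figures": [
        {"name": "Top 10 Timers", "granularity": "rank"},
        {"name": "Memory Usage", "granularity": "node"},
        {"name": "IO Volume", "granularity": "rank"},
    ],
}
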
def open_files(input_file, output_file, parallel=False, diskless=False):
    if parallel:
        adios2f = adios2.open(input_file, "r", comm=MPI.COMM_WORLD)
    else:
        adios2f = adios2.open(input_file, "r")
    netcdff = Dataset(
        output_file,
        "w",
        format="NETCDF4",
        parallel=parallel,
        diskless=diskless,
    )
    netcdff.set_fill_off()
    return (adios2f, netcdff)

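# A minimal usage sketch for open_files above; the file names are
# placeholders and the copy loop is elided.
# adios2f, netcdff = open_files("input.bp", "output.nc", parallel=False)
# ... translate variables from adios2f into netcdff ...
# adios2f.close()
# netcdff.close()
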
def setUp(self):
    total_steps = 10
    with adios2.open(TESTDATA_FILENAME, "w", comm) as fh:
        for i in range(total_steps):
            fh.write("step", np.full((Nx), i, dtype=np.int32),
                     shape, start, count)
            fh.end_step()

def __init__(self, istart, iend, istep, midwidth, mesh, f0):
    # setup flux surface average
    self.midwidth = midwidth
    self.istart = istart
    self.iend = iend
    self.istep = istep

    # setup flux surface average matrix
    # read whole data
    for i in range(istart, iend, istep):
        # 3d file name
        filename = "xgc.3d.%5.5d.bp" % (i)
        # read data
        with adios2.open(filename, "r") as f:
            dpot = f.read("dpot")
            dden = f.read("eden")
        nzeta = dpot.shape[0]
        print(nzeta)  # check correct number
        dpotn0 = np.mean(dpot, axis=0)
        dpot = dpot - dpotn0  # numpy broadcasting
        # toroidal average of (dpot/Te)^2
        var = np.mean(dpot**2, axis=0) / f0.Te0**2
        # flux surface average of dpot/Te (midplane only)
        # self.dpot_te_sqr =
        dden = dden - np.mean(dden, axis=0)  # remove n=0 mode
        var = dpot / f0.Te0 + dden / f0.ne0
        var = np.mean(var**2, axis=0)  # toroidal average

def __init__(self, filename):
    # read file and assign its variables as attributes
    with adios2.open(filename, "r") as self.f:
        self.vars = self.f.available_variables()
        for v in self.vars:
            stc = self.vars[v].get("AvailableStepsCount")
            ct = self.vars[v].get("Shape")
            sgl = self.vars[v].get("SingleValue")
            stc = int(stc)
            if ct != '':
                ct = int(ct)
                setattr(
                    self, v,
                    self.f.read(v, start=[0], count=[ct],
                                step_start=0, step_count=stc))
            elif v != 'gsamples' and v != 'samples':
                # null list for scalar
                setattr(
                    self, v,
                    self.f.read(v, start=[], count=[],
                                step_start=0, step_count=stc))

def __getXb(self):
    """
    Collect the offsets of the lower-left corner of each patch,
    in number of grid points.
    Metadata for the entire grid.
    """
    with adios2.open(self.path, 'r') as fh:
        patchXb = fh.read('grid::xb')
    return patchXb

def __getPatchOffsets(self):
    """
    Collect the global position offsets of patches.
    Metadata for the entire grid.
    """
    with adios2.open(self.path, 'r') as fh:
        patchCoordinates = fh.read('grid::off')
    return patchCoordinates

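# A plausible wiring of the two private helpers above, sketching how the
# reader class might cache this metadata at construction time. The
# constructor itself is an assumption; only self.xb (used by getPatch
# below) and the helper names come from the surrounding code.
# def __init__(self, path, species=-1):
#     self.path = path
#     self.species = species
#     self.xb = self.__getXb()             # per-patch lower-left offsets
#     self.off = self.__getPatchOffsets()  # global position offsets
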
def test_GlobalArray(self):
    with adios2.open(filename, 'r') as fh:
        for fh_step in fh:
            t = fh_step.current_step()
            val = fh_step.read("global_array", (0, 1), (2, 3))
            self.assertTrue(np.array_equal(val, global_arrays[t][0:2, 1:4]))

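# A minimal writer-side sketch that would produce the data the test above
# reads, using the same high-level write(name, array, shape, start, count)
# signature seen elsewhere in this file. The helper name and the
# single-writer decomposition are assumptions.
def write_global_arrays(filename, global_arrays):
    shape = list(global_arrays[0].shape)  # global dimensions
    start = [0, 0]                        # one writer owns the whole array
    count = shape
    with adios2.open(filename, 'w') as fh:
        for arr in global_arrays:
            # one output step per array in the list
            fh.write("global_array", arr, shape, start, count, end_step=True)
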
def parse_tau_file(file):
    """
    Parses the tau file in the current location and returns a set of
    attributes.

    :param file:
    :return:
    """
    attributes = {}
    tau_string_attrs = TAU_EXTRACT_STRING.split(",")
    # materialize the map so it can be re-iterated on every step
    tau_string_attrs = list(map(str.strip, tau_string_attrs))
    with adios2.open(file, "r", MPI.COMM_SELF) as tauf:
        for fstep in tauf:
            for key in tau_string_attrs:
                # strip the "MetaData" prefix from the key name
                refactored_key = key if "MetaData" not in key else key.split(':')[-1].lower()
                refactored_key = refactored_key.replace(' ', '_')
                attributes[refactored_key] = fstep.read_attribute_string(key)[0]

            # inspect variables in current step
            step_vars = fstep.available_variables()
            print("Fstep value: {}\n".format(fstep))

            # print variables information
            for name, info in step_vars.items():
                print("variable_name: " + name)
                for key, value in info.items():
                    print("\t" + key + ": " + value)
                print("\n")

            values = fstep.read("counter_values")[0]
            print(values)
            # print(fstep.read_attribute_string("MetaData:0:0:Hostname"))

    attributes["location"] = str(file)
    return get_attributes(attributes)

def test_write_read_string_highAPI(self):
    comm = MPI.COMM_WORLD
    theString = 'hello adios'
    bpFilename = 'string_test_highAPI.bp'
    varname = 'mystringvar'

    with adios2.open(bpFilename, "w", comm) as fh:
        for step in range(N_STEPS):
            fh.write(varname, theString + str(step), end_step=True)

    with adios2.open(bpFilename, "r", comm) as fh:
        for fstep in fh:
            step = fstep.current_step()
            result = fstep.read_string(varname)
            self.assertEqual(result, [theString + str(step)])

def open(self):
    if not self.is_open:
        try:
            # print("[Rank ", self.my_rank, "] :", "Looking for..", self.inputfile)
            i = 0
            found = 0
            # Wait until file exists..
            while i < 1:
                if os.path.isfile(self.inputfile) or os.path.isdir(self.inputfile):
                    # print("[Rank ", self.my_rank, "] :", "found file ", self.inputfile)
                    found = 1
                    break
                elif os.path.isfile(self.inputfile + ".sst"):
                    # print("[Rank ", self.my_rank, "] :", "found file ", self.inputfile, ".sst")
                    found = 1
                    break
                # time.sleep(1)
            # print("[Rank ", self.my_rank, "] :", "Found ? ", found)
            self.conn = adios2.open(self.inputfile, "r", self.mpi_comm,
                                    self.eng_name)
            self.is_open = True
        except Exception as ex:
            traceback.print_exc()
            # print("[Rank ", self.my_rank, "] :", "Got an exception!!", ex)
            self.is_open = False
    return self.is_open

def test_LocalArray(self):
    with adios2.open(filename, 'r') as fh:
        for fh_step in fh:
            t = fh_step.current_step()
            for b in range(n_blocks):
                val = fh_step.read('local_array', b)
                self.assertTrue(np.array_equal(val, local_arrays[t][b]))

def getPatch(self, coordinates, cellsPerPatch):
    """
    Return all features of particles in patch

    args:
        coordinates (int, int, int): lower left cell index of patch in grid
        cellsPerPatch [int]: number of cells per grid patch
    """
    start, count, idx = self.__toStartCount(coordinates, cellsPerPatch)
    with adios2.open(self.path, 'r') as fh:
        x = fh.read('mprts::mprts::x', start, count).reshape(-1, 1)
        y = fh.read('mprts::mprts::y', start, count).reshape(-1, 1)
        z = fh.read('mprts::mprts::z', start, count).reshape(-1, 1)
        ux = fh.read('mprts::mprts::ux', start, count).reshape(-1, 1)
        uy = fh.read('mprts::mprts::uy', start, count).reshape(-1, 1)
        uz = fh.read('mprts::mprts::uz', start, count).reshape(-1, 1)
        kind = fh.read('mprts::mprts::kind', start, count).reshape(-1, 1)
        qni_wni = fh.read('mprts::mprts::qni_wni', start, count).reshape(-1, 1)

    prts = np.concatenate((x, y, z, ux, uy, uz, kind, qni_wni), axis=1)
    prts = pd.DataFrame(
        prts,
        columns=['x', 'y', 'z', 'ux', 'uy', 'uz', 'kind', 'qni_wni'],
        dtype=np.float32)
    prts[['x', 'y', 'z']] = prts[['x', 'y', 'z']] + self.xb[idx]
    if self.species != -1:
        prts = prts[prts['kind'] == self.species]
    return prts

def getPatchMomentum(self, coordinates, cellsPerPatch):
    """
    Return 3D momentum of particles in patch, normalized with mass = 1

    args:
        coordinates (int, int, int): lower left cell index of patch in grid
        cellsPerPatch [int]: number of cells per grid patch
    """
    # convert coordinates to patch index here
    start, count, _ = self.__toStartCount(coordinates, cellsPerPatch)
    with adios2.open(self.path, 'r') as fh:
        ux = fh.read('mprts::mprts::ux', start, count).reshape(-1, 1)
        uy = fh.read('mprts::mprts::uy', start, count).reshape(-1, 1)
        uz = fh.read('mprts::mprts::uz', start, count).reshape(-1, 1)
        kind = fh.read('mprts::mprts::kind', start, count).reshape(-1, 1)

    prts = np.concatenate((ux, uy, uz, kind), axis=1)
    prts = pd.DataFrame(prts, columns=['ux', 'uy', 'uz', 'kind'],
                        dtype=np.float32)
    if self.species != -1:
        prts = prts[prts['kind'] == self.species]
    return prts[['ux', 'uy', 'uz', 'kind']]

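# A minimal usage sketch for the two patch accessors above. The reader
# class name, file path, and patch geometry are hypothetical; only the
# method names and argument shapes come from the code above.
# reader = PatchReader(path='pfd.000000.bp', species=1)  # hypothetical ctor
# prts = reader.getPatch((0, 0, 0), cellsPerPatch=[8, 8, 8])
# mom = reader.getPatchMomentum((0, 0, 0), cellsPerPatch=[8, 8, 8])
# print(prts[['x', 'y', 'z']].describe())
# print(mom[['ux', 'uy', 'uz']].mean())
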
def __init__(self, expdir=''):
    fname = os.path.join(expdir, 'xgc.mesh.bp')
    print(f"Reading: {fname}")
    with ad2.open(fname, 'r') as f:
        self.nnodes = f.read('n_n').item()
        self.ncells = f.read('n_t').item()
        self.rz = f.read('rz')
        self.conn = f.read('nd_connect_list')
        self.psi = f.read('psi')
        self.psi_surf = f.read('psi_surf')
        self.surf_idx = f.read('surf_idx')
        self.surf_len = f.read('surf_len')
        self.nextnode = f.read('nextnode')

    self.r = self.rz[:, 0]
    self.z = self.rz[:, 1]

    if len(self.surf_len) == 0:
        print(f"==> Warning: no psi_surf/surf_len/surf_idx in {fname}")
        print(f"==> Warning: Please check if CONVERT_GRID2 is enabled.")

    bl = np.zeros_like(self.nextnode, dtype=bool)
    for i in range(len(self.surf_len)):
        n = self.surf_len[i]
        k = self.surf_idx[i, :n] - 1
        for j in k:
            bl[j] = True
    self.not_in_surf = np.arange(len(self.nextnode))[~bl]

def __init__(self, dir):
    self.dir = dir
    self.step = 0
    self._adios_stream = adios2.open(
        name=f"{dir}/SimulationOutput.bp",
        mode="w",
        config_file=f"{dir}/adios.xml",
        io_in_config_file="SimulationOutput")
    self.stop = False

def __init__(self, expdir=''):
    fname = os.path.join(expdir, 'xgc.f0.mesh.bp')
    print(f"Reading: {fname}")
    with ad2.open(fname, 'r') as f:
        self.f0_nmu = f.read('f0_nmu')
        self.f0_nvp = f.read('f0_nvp')
        self.f0_smu_max = f.read('f0_smu_max')
        self.f0_vp_max = f.read('f0_vp_max')
        self.f0_dsmu = f.read('f0_dsmu')
        self.f0_dvp = f.read('f0_dvp')
        self.f0_T_ev = f.read('f0_T_ev')
        self.f0_grid_vol_vonly = f.read('f0_grid_vol_vonly')
        self.nb_curl_nb = f.read('nb_curl_nb')

    self.sml_e_charge = 1.6022E-19  ## electron charge (MKS)
    self.sml_ev2j = self.sml_e_charge
    self.ptl_e_mass_au = 2E-2
    self.ptl_mass_au = 2E0
    self.sml_prot_mass = 1.6720E-27  ## proton mass (MKS)
    self.ptl_mass = [
        self.ptl_e_mass_au * self.sml_prot_mass,
        self.ptl_mass_au * self.sml_prot_mass
    ]
    self.ptl_charge_eu = 1.0  #! charge number
    self.ptl_e_charge_eu = -1.0
    self.ptl_charge = [
        self.ptl_e_charge_eu * self.sml_e_charge,
        self.ptl_charge_eu * self.sml_e_charge
    ]

    ## index: imu, range: [0, f0_nmu]
    self.mu_vol = np.ones(self.f0_nmu + 1)
    self.mu_vol[0] = 0.5
    self.mu_vol[-1] = 0.5

    ## index: ivp, range: [-f0_nvp, f0_nvp]
    self.vp_vol = np.ones(self.f0_nvp * 2 + 1)
    self.vp_vol[0] = 0.5
    self.vp_vol[-1] = 0.5

    # f0_smu_max = 3.0
    # f0_dsmu = f0_smu_max/f0_nmu
    self.mu = (np.arange(self.f0_nmu + 1, dtype=np.float128) * self.f0_dsmu)**2
    self.vp = np.arange(-self.f0_nvp, self.f0_nvp + 1,
                        dtype=np.float128) * self.f0_dvp

    ## pre-calculation for f0_diag
    isp = 1
    self.en_th = self.f0_T_ev[isp, :] * self.sml_ev2j
    self.vth2 = self.en_th / self.ptl_mass[isp]
    self.vth = np.sqrt(self.vth2)
    self.f0_grid_vol = self.f0_grid_vol_vonly[isp, :]

    _x, _y = np.meshgrid(self.mu_vol, self.vp_vol)
    self.mu_vp_vol = _x * _y

def test_write_read_string_highAPI(self):
    comm = MPI.COMM_WORLD
    theString = 'hello adios'
    bpFilename = 'string_test_highAPI.bp'
    varname = 'mystringvar'
    NSteps = 3

    with adios2.open(bpFilename, "w", comm) as fh:
        for step in range(NSteps):
            fh.write(varname, theString + str(step), end_step=True)

    with adios2.open(bpFilename, "r", comm) as fh:
        for fstep in fh:
            step = fstep.current_step()
            result = fstep.read(varname)
            self.assertEqual("".join([chr(s) for s in result]),
                             theString + str(step))

def __getSizes(self):
    """
    Collect the number of particles assigned to each patch.
    Metadata for the entire grid.
    """
    if 'mprts::mprts::size_by_patch' not in self.columns:
        return 0
    with adios2.open(self.path, 'r') as fh:
        size_by_patch = fh.read('mprts::mprts::size_by_patch')
    return size_by_patch

def save_spec(results, tstep):
    # TODO: Determine how to use adios2 efficiently instead (and how to
    # read in like normal, e.g. without steps?)
    # np.savez(resultspath + 'delta.' + str(tstep).zfill(4) + '.npz', **results)
    with adios2.open(
            cfg["resultspath"] + 'delta.' + str(tstep).zfill(4) + '.bp',
            'w') as fw:
        for key in results.keys():
            fw.write(key, results[key], results[key].shape,
                     [0] * len(results[key].shape), results[key].shape)

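# A minimal read-back sketch for the file written by save_spec, assuming
# the same cfg["resultspath"] layout and that every variable is a shaped
# array (as save_spec writes them). The helper name is an assumption;
# variable names and counts are discovered via available_variables().
def load_spec(tstep):
    results = {}
    fname = cfg["resultspath"] + 'delta.' + str(tstep).zfill(4) + '.bp'
    with adios2.open(fname, 'r') as fr:
        for key, info in fr.available_variables().items():
            count = [int(d) for d in info["Shape"].split(',')]
            results[key] = fr.read(key, [0] * len(count), count)
    return results
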
async def _apply_smoother(self, hess: bool):
    import adios2

    names = []

    if hess and self.precondition:
        src = 'hess_vel_raw.bp'
        dst = 'hess_vel_smooth.bp'
        cmd = f'xconvert_hessian kernels_raw.bp DATABASES_MPI/solver_data.bp hess_vel_raw.bp {self.precondition}'
        await self.mpiexec(getpath('adios', 'bin', cmd), getsize(self), 1, 0, 'adios')
    else:
        src = 'kernels_raw.bp'
        dst = 'hessian_smooth.bp' if hess else 'kernels_smooth.bp'

    # get the names of the kernels to be smoothed
    with adios2.open(self.abs(src), 'r') as fh:  # type: ignore
        pf = '_crust_mantle/array'
        for fstep in fh:
            step_vars = fstep.available_variables()
            for name in step_vars:
                if name.endswith(pf):
                    name = name.split(pf)[0]
                    if name.startswith('hess_'):
                        if hess:
                            names.append(name)
                    else:
                        if not hess:
                            names.append(name)

    # save the number of kernels being smoothed for probe_smoother
    kind = 'smooth_' + ('hess' if hess else 'kl')
    cache[kind] = len(names)

    # get the command to call smoother
    cmd = 'bin/xsmooth_laplacian_sem_adios'
    radius = self.smooth_hessian if hess else self.smooth_kernels
    if isinstance(radius, list):
        radius = max(radius[1], radius[0] * radius[2]**self.iteration)

    kl = ','.join(names)
    await self.mpiexec(
        f'{cmd} {radius} {radius} {kl} {src} DATABASES_MPI/ {dst} > OUTPUT_FILES/{kind}.txt',
        getsize(self), 1, 0, 'smooth_kernels')

    # reset status
    del cache[kind]

def __init__(self, expdir='', step=None):
    if step is None:
        return
    fname = os.path.join(expdir, 'restart_dir/xgc.f0.%05d.bp' % step)
    print(f"Reading: {fname}")
    with ad2.open(fname, 'r') as f:
        self.E_rho_ff = f.read('E_rho_ff')      # (nphi,nnodes,3,2,3)
        self.pot_rho_ff = f.read('pot_rho_ff')  # (nphi,nnodes,3,2)
        self.pot0 = f.read('pot0')              # (nphi,nnodes)
    if len(self.E_rho_ff) == 0:
        print(f"==> Warning: no E_rho_ff/pot_rho_ff/pot0 data in {fname}")
        print(f"==> Warning: Please check if XGC_F_COUPLING is enabled.")

def process_file(args):
    fontsize = 12
    filename = args.instream
    print("Opening:", filename)
    if not args.nompi:
        fr = adios2.open(filename, "r", MPI.COMM_SELF, "adios2.xml",
                         "TAUProfileOutput")
    else:
        fr = adios2.open(filename, "r", "adios2.xml", "TAUProfileOutput")
    # num_threads = fr[0].available_variables()["num_threads"]["max"]

    try:
        with open("monitor_config.JSON") as config_file:
            settings = json.load(config_file)
            print("custom settings loaded")
    except IOError:
        settings = DEFAULT
        print("default settings loaded")

    num_ranks = int(fr[0]["num_threads"]["Shape"].split(',')[0])
    initialize_globals(settings, num_ranks)
    cur_step = 0
    for fr_step in fr:
        # track current step
        cur_step = fr_step.current_step()
        print(filename, "Step = ", cur_step)
        # inspect variables in current step
        vars_info = fr_step.available_variables()
        # dump_vars(vars_info)
        get_utilization(True, fr_step, vars_info, cpu_components,
                        previous_mean, previous_count, current_period,
                        period_values)
        get_utilization(False, fr_step, vars_info, mem_components,
                        previous_mean, previous_count, current_period,
                        period_values)
        get_utilization(False, fr_step, vars_info, io_components,
                        previous_mean, previous_count, current_period,
                        period_values)
        top5 = get_top5(fr_step, vars_info)
        x = range(0, cur_step + 1)
        plot_utilization(args, x, fontsize, cur_step, top5, settings)

def load_simulation_data(data_dir, idx_nn, idx_kk, num_planes=8,
                         X_key_list=[
                             "eden", "iden", "u_e", "u_i", "dpot", "a_par",
                             "apar_res", "pot_res"
                         ]):
    """Load simulation data from a time-step idx_nn and iteration idx_kk.

    Parameters:
    -----------
    data_dir..: Data directory of the simulation
    idx_nn....: time-step index
    idx_kk....: iteration index
    X_key_list: Keys to load as node features
    """
    # Datafile that stores the result of the converged iteration
    fname_conv = f"xgc.3d.{idx_nn:05d}.c.npz"
    # Datafile that stores apar_try and dpot_try in iteration k
    fname_iter = f"xgc.3d.{idx_nn:03d}{idx_kk:02d}.npz"

    features_node = {}
    features_target = {}

    # the with-block closes the file on exit; no explicit close needed
    with adios2.open(join(data_dir, "xgc.bfield.bp"), "r") as df:
        Bvec = df.read("/node_data[0]/values")

    # Calculate the total magnetic field
    Btotal = np.sqrt(np.sum(Bvec**2.0, axis=1))
    features_node["B"] = np.tile(Btotal, (num_planes, 1)).T.flatten() / Btotal.max()

    with np.load(join(data_dir, "raw", fname_iter)) as df:
        for key in X_key_list:
            features_node[key] = df[key].T.flatten()
        # Save apar_try and pot_try to calculate the error later
        apar_try = df["apar_try"].T.flatten()
        pot_try = df["pot_try"].T.flatten()

    # Define the error as err := real_solution - current_iteration
    with np.load(join(data_dir, "raw", fname_conv)) as df:
        features_target["apar_err"] = df["apar_try"].T.flatten() - apar_try
        features_target["dpot_err"] = df["pot_try"].T.flatten() - pot_try

    # Iterate over keys in X_key_list to ensure the features are ordered correctly
    data_x = np.array([features_node[key] for key in ["B"] + X_key_list]).T
    data_y = np.array([features_target["apar_err"],
                       features_target["dpot_err"]]).T

    return (data_x, data_y)

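# A minimal usage sketch for load_simulation_data; the directory and the
# index values are hypothetical placeholders.
# data_x, data_y = load_simulation_data("/path/to/sim", idx_nn=120, idx_kk=3)
# data_x has shape (num_planes * nnodes, 1 + len(X_key_list)): "B" plus
# one column per node-feature key; data_y has shape (num_planes * nnodes, 2)
# for the apar and dpot errors.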