def create_general_video(fig, path_to_folder, abs_path, func, func_args=None, func_kw=None, time_range=None):
    """Build a matplotlib ``FuncAnimation`` over the simulation times in a folder.

    Parameters
    ----------
    fig : matplotlib figure passed to ``func`` for every frame.
    path_to_folder, abs_path : forwarded to ``misc_utils.time_extract`` to
        discover the available output times.
    func : callable ``func(fig, time, *func_args, **func_kw)`` that draws one frame.
    func_args, func_kw : optional extra positional/keyword arguments for ``func``.
    time_range : optional ``(t_min, t_max)``; only times strictly inside the
        open interval are animated.

    Returns
    -------
    mpl.animation.FuncAnimation
    """
    times = misc_utils.time_extract(path_to_folder, abs_path)

    if time_range is not None:
        # keep times strictly inside the open interval (t_min, t_max)
        times = [t for t in times if time_range[0] < t < time_range[1]]

    times.sort()

    if cp.rcParams["TEST"]:
        # test mode: animate only the last 10 frames to keep runs short
        times = times[-10:]

    func_args = () if func_args is None else func_args
    func_kw = {} if func_kw is None else func_kw

    def _draw_frame(time):
        return func(fig, time, *func_args, **func_kw)

    return mpl.animation.FuncAnimation(fig, _draw_frame, frames=times)
def _quad_extract(self, h_list, path_to_folder='.', time0=None, abs_path=True):
    """Run quadrant analysis over all output times, accumulating a running
    time average of the per-time quadrant arrays.

    Parameters
    ----------
    h_list : list of hole sizes; each contributes 4 quadrant entries to the index.
    path_to_folder, abs_path : location of the simulation output.
    time0 : optional cutoff; only times strictly greater are processed.

    Side effects: sets ``self._meta_data``, ``self.NCL``, ``self._avg_data``,
    ``self.shape`` and ``self.QuadAnalDF``.
    """
    times = misc_utils.time_extract(path_to_folder, abs_path)
    if time0 is not None:
        times = list(filter(lambda x: x > time0, times))

    if cp.rcParams['TEST']:
        # test mode: only the three latest times
        times.sort()
        times = times[-3:]

    self._meta_data = self._module._meta_class(path_to_folder, abs_path)
    self.NCL = self._meta_data.NCL

    try:
        self._avg_data = self._module._avg_io_class(
            max(times), self._meta_data, path_to_folder, time0, abs_path)
    except Exception:
        # the latest time may be unreadable (e.g. partially written); drop it
        # and retry. FIX: abs_path was silently dropped on the retry call.
        times.remove(max(times))
        self._avg_data = self._module._avg_io_class(
            max(times), self._meta_data, path_to_folder, time0, abs_path)

    quad_anal_array = None
    for i, timing in enumerate(times, start=1):
        fluct_data = self._module._fluct_io_class(
            timing, self._avg_data, time0=time0,
            path_to_folder=path_to_folder, abs_path=abs_path)
        fluct_uv, quadrant_array = self._quadrant_extract(
            fluct_data.fluctDF, timing, self.NCL)

        local_quad_anal_array = self._quad_calc(
            self._avg_data, fluct_uv, quadrant_array, self.NCL, h_list, timing)

        if quad_anal_array is None:
            quad_anal_array = local_quad_anal_array
        else:
            assert local_quad_anal_array.shape == quad_anal_array.shape, \
                "shape of previous array (%d,%d) " % quad_anal_array.shape \
                + " and current array (%d,%d) must be the same" % local_quad_anal_array.shape
            # incremental (running) mean over the i times processed so far
            coe_old = (i - 1) / i
            coe_new = 1 / i
            quad_anal_array = quad_anal_array * coe_old + local_quad_anal_array * coe_new

        gc.collect()

    # index: outer level is the hole size h, inner level the quadrant 1-4
    index = [[], []]
    for h in h_list:
        index[0].extend([h] * 4)
    index[1] = [1, 2, 3, 4] * len(h_list)

    self.shape = self._avg_data.shape
    self.QuadAnalDF = cd.datastruct(quad_anal_array, index=index)
def _POD_extract(self, comp, path_to_folder='.', method='svd', low_memory=True,
                 abs_path=True, time0=None, subdomain=None, nsnapshots=100, nmodes=10):
    """Extract POD modes from the last ``nsnapshots`` output times.

    Parameters
    ----------
    comp : iterable of component labels; one set of modes per component.
    method : ``'svd'`` or ``'snapshots'`` (case-insensitive); anything else
        raises ``ValueError``.
    low_memory : forwarded to the snapshots method only.
    time0 : optional cutoff; only times strictly greater are used.
    subdomain : optional kwargs for ``self._coorddata.create_subdomain``.
    nmodes : number of modes to keep, clamped to the number of snapshots.

    Side effects: sets ``self.avg_data``, ``self._meta_data``,
    ``self._eig_values`` and ``self.POD_modesDF``.
    """
    max_time = misc_utils.max_time_calc(path_to_folder, abs_path)
    self.avg_data = self._module._avg_class(max_time, path_to_folder=path_to_folder,
                                            abs_path=abs_path, time0=time0)
    if subdomain is not None:
        self._coorddata.create_subdomain(**subdomain)
    self._meta_data = self.avg_data._meta_data

    times = misc_utils.time_extract(path_to_folder, abs_path)
    # FIX: was `if time0:` which silently ignored a valid time0 == 0;
    # every sibling extractor tests `is not None`.
    if time0 is not None:
        times = list(filter(lambda x: x > time0, times))
    times = times[-nsnapshots:]

    # NOTE(review): other extractors in this file use cp.rcParams — confirm
    # that bare `rcParams` is the same object here.
    if rcParams['TEST']:
        # test mode: shrink the snapshot set and the mode count
        times = times[-7:]
        nmodes = 7

    # cannot extract more modes than there are snapshots
    nmodes = min(nmodes, len(times))

    if method.lower() == "svd":
        PODmodes, self._eig_values = self._performSVD(
            comp, path_to_folder, abs_path, self.avg_data, times, nmodes, subdomain)
    elif method.lower() == "snapshots":
        PODmodes, self._eig_values = self._performSnapShots(
            comp, path_to_folder, abs_path, low_memory, self.avg_data,
            times, nmodes, subdomain)
    else:
        msg = f"Method selected ({method}) is not valid"
        raise ValueError(msg)

    index = [[None] * len(comp), list(comp)]
    self.POD_modesDF = cd.datastruct(PODmodes, index=index)
def _quad_extract(self, h_list, path_to_folder='.', time0=None, abs_path=True):
    """Quadrant analysis for the tg (temporally-developing) case.

    Unlike the io variant, the per-time quadrant arrays are stacked with
    ``np.vstack`` rather than averaged, and results are returned instead of
    stored on ``self``.

    Returns
    -------
    (meta_data, NCL, avg_data, QuadAnalDF, shape)
    """
    times = misc_utils.time_extract(path_to_folder, abs_path)
    if time0 is not None:
        times = list(filter(lambda x: x > time0, times))

    if cp.rcParams['TEST']:
        # test mode: only the three latest times
        times.sort()
        times = times[-3:]

    meta_data = self._module._meta_class(path_to_folder, abs_path)
    NCL = meta_data.NCL
    avg_data = self._module._avg_tg_base_class(
        times, meta_data=meta_data, path_to_folder=path_to_folder,
        time0=time0, abs_path=abs_path)

    # FIX: replaced the fragile `'quad_anal_array' not in locals()` test
    # with an explicit None sentinel.
    quad_anal_array = None
    for timing in times:
        fluct_data = self._module._fluct_tg_class(
            timing, avg_data, path_to_folder=path_to_folder, abs_path=abs_path)
        fluct_uv, quadrant_array = self._quadrant_extract(
            fluct_data.fluctDF, timing, NCL)

        local_quad_anal_array = self._quad_calc(
            avg_data, fluct_uv, quadrant_array, NCL, h_list, timing)

        if quad_anal_array is None:
            quad_anal_array = local_quad_anal_array
        else:
            quad_anal_array = np.vstack([quad_anal_array, local_quad_anal_array])
        gc.collect()

    # index: outer level is the hole size h, inner level the quadrant 1-4
    index = [[], []]
    for h in h_list:
        index[0].extend([h] * 4)
    index[1] = [1, 2, 3, 4] * len(h_list)

    shape = avg_data.shape
    QuadAnalDF = cd.datastruct(quad_anal_array, index=index)
    return meta_data, NCL, avg_data, QuadAnalDF, shape
def create_video(cls, axis_vals, comp, avg_data=None, contour=True, plane='xz',
                 meta_data=None, path_to_folder='.', time_range=None,
                 abs_path=True, tgpost=False, x_split_list=None, plot_kw=None,
                 lim_min=None, lim_max=None, ax_func=None, fluct_func=None,
                 fluct_args=(), fluct_kw=None, fig=None, ax=None, **kwargs):
    """Create an animation of fluctuation contours (or 3-D surfaces) over time.

    Parameters
    ----------
    axis_vals : value(s) of the plane coordinate to plot; normalised via
        ``misc_utils.check_list_vals``.
    comp : velocity component to plot.
    avg_data : precomputed averages; built from ``max_time`` when omitted
        (io case only).
    contour : True for ``plot_contour``, False for ``plot_fluct3D_xz``.
    time_range : optional ``(t_min, t_max)`` window forwarded to the video helper.
    lim_min, lim_max : colour limits applied to every frame's images.
    fluct_func, fluct_args, fluct_kw : optional per-frame callback.
    ax_func : optional post-processing hook applied to the axes each frame.

    Returns
    -------
    The animation produced by ``cplt.create_general_video``.
    """
    # FIX: fluct_kw={} was a mutable default argument shared across calls.
    if fluct_kw is None:
        fluct_kw = {}

    times = misc_utils.time_extract(path_to_folder, abs_path)
    max_time = np.amax(times) if time_range is None else time_range[1]

    if avg_data is None and not tgpost:
        time0 = time_range[0] if time_range is not None else None
        avg_data = cls._module._avg_class(max_time, meta_data, path_to_folder,
                                          time0, abs_path, tgpost=cls.tgpost)

    axis_vals = misc_utils.check_list_vals(axis_vals)

    if x_split_list is None:
        if meta_data is None:
            meta_data = cls._module._meta_class(path_to_folder, abs_path,
                                                tgpost=tgpost)
        x_coords = meta_data.CoordDF['x']
        x_split_list = [np.min(x_coords), np.max(x_coords)]

    if fig is None:
        if 'figsize' not in kwargs:
            # one column per axis value, one row per x split
            kwargs['figsize'] = [7 * len(axis_vals),
                                 3 * (len(x_split_list) - 1)]
        fig = cplt.figure(**kwargs)

    if contour:
        plot_kw = cplt.update_pcolor_kw(plot_kw)

    def func(fig, time):
        # rebuild the axes from scratch for every frame
        axes = fig.axes
        for ax in axes:
            ax.remove()

        fluct_data = cls(time, avg_data, path_to_folder=path_to_folder,
                         abs_path=abs_path)

        if contour:
            fig, ax = fluct_data.plot_contour(comp, axis_vals, plane=plane,
                                              PhyTime=time,
                                              x_split_list=x_split_list,
                                              fig=fig, pcolor_kw=plot_kw)
        else:
            fig, ax = fluct_data.plot_fluct3D_xz(axis_vals, comp, time,
                                                 x_split_list, fig, **plot_kw)
        ax[0].axes.set_title(r"$t^*=%.3g$" % time, loc='left')

        if fluct_func is not None:
            fluct_func(fig, ax, time, *fluct_args, **fluct_kw)
        if ax_func is not None:
            ax = ax_func(ax)

        # enforce a common colour scale across frames
        for im in ax:
            im.set_clim(vmin=lim_min)
            im.set_clim(vmax=lim_max)

        fig.tight_layout()
        return ax

    return cplt.create_general_video(fig, path_to_folder, abs_path, func,
                                     time_range=time_range)
def _extract_fluct(self, x, y, path_to_folder=None, time0=None, gridsize=200,
                   y_mode='half-channel', use_ini=True, xy_inner=True,
                   tgpost=False, abs_path=True):
    """Collect u'/v' fluctuation samples at (x, y) locations over all output
    times and estimate their joint PDF with a 2-D kernel density estimate.

    Parameters
    ----------
    x, y : coordinate lists. When ``xy_inner`` is True they are paired
        element-wise (must be equal length); otherwise the outer product of
        x and y is used.
    gridsize : resolution passed to the seaborn KDE.
    y_mode : normalisation mode for the y coordinate lookup.
    use_ini : when True the y normalisation uses x = 0 for every point.

    Side effects: sets ``self._meta_data``, ``self.avg_data``,
    ``self._x_loc_norm``, ``self._y_mode``, ``self.pdf_arrayDF``,
    ``self.u_arrayDF`` and ``self.v_arrayDF``.
    """
    times = misc_utils.time_extract(path_to_folder, abs_path)
    if time0 is not None:
        times = list(filter(lambda x: x > time0, times))

    if cp.rcParams['TEST']:
        # test mode: only the five latest times
        times.sort()
        times = times[-5:]

    self._meta_data = self._module._meta_class(path_to_folder, abs_path)

    try:
        self.avg_data = self._module._avg_io_class(max(times), self._meta_data,
                                                   path_to_folder, time0,
                                                   abs_path)
    except Exception:
        # the latest time may be unreadable (e.g. partially written); drop it
        # and retry. FIX: abs_path was silently dropped on the retry call.
        times.remove(max(times))
        self.avg_data = self._module._avg_io_class(max(times), self._meta_data,
                                                   path_to_folder, time0,
                                                   abs_path)

    if xy_inner:
        if len(x) != len(y):
            msg = "length of x coordinate array must be same"+\
                " as the y coord array. Lengths provided %d (x),"%len(x)+\
                " %d (y)"%len(y)
            raise ValueError(msg)
        x_coord_list = x
        y_coord_list = y
    else:
        # outer product: every x paired with every y
        x_coord_list = []
        y_coord_list = []
        for x_val in x:
            for y_val in y:
                x_coord_list.append(x_val)
                y_coord_list.append(y_val)

    x_index = indexing.coord_index_calc(self.avg_data.CoordDF, 'x',
                                        x_coord_list)
    # with use_ini the y normalisation is taken at x = 0 for every point
    self._x_loc_norm = x_coord_list if not use_ini else [0] * len(y_coord_list)
    y_index = indexing.y_coord_index_norm(self.avg_data, y_coord_list,
                                          self._x_loc_norm, y_mode)
    y_index = np.diag(np.array(y_index))

    u_prime_array = [[] for _ in range(len(y_index))]
    v_prime_array = [[] for _ in range(len(y_index))]

    for time in times:
        fluct_data = self._module._fluct_io_class(time, self.avg_data,
                                                  path_to_folder, abs_path)
        u_prime_data = fluct_data.fluctDF[time, 'u']
        v_prime_data = fluct_data.fluctDF[time, 'v']

        # FIX: loop variable previously shadowed the parameter `y`
        for i in range(len(y_index)):
            u_prime_array[i].extend(u_prime_data[:, y_index[i], x_index[i]])
            v_prime_array[i].extend(v_prime_data[:, y_index[i], x_index[i]])

            if cp.rcParams['SymmetryAVG'] and self.metaDF['iCase'] == 1:
                # mirror samples about the channel centreline; the
                # wall-normal component v changes sign under reflection
                u_prime_array[i].extend(u_prime_data[:, -1 - y_index[i],
                                                     x_index[i]])
                v_prime_array[i].extend(
                    -1 * v_prime_data[:, -1 - y_index[i], x_index[i]])

        # free the per-time fluctuation arrays before the next iteration
        gc.collect()

    pdf_array = [[] for _ in range(len(y_index))]
    u_array = [[] for _ in range(len(y_index))]
    v_array = [[] for _ in range(len(y_index))]

    # NOTE(review): seaborn._statistics.KDE is a private seaborn API —
    # confirm it is stable across the pinned seaborn version.
    estimator = seaborn._statistics.KDE(gridsize=gridsize)
    for i in range(len(y_index)):
        pdf_array[i], (u_array[i], v_array[i]) = estimator(
            np.array(u_prime_array[i]), np.array(v_prime_array[i]))

    index = list(zip(x_coord_list, y_coord_list))
    pdf_array = np.array(pdf_array)
    u_array = np.array(u_array)
    v_array = np.array(v_array)

    self._y_mode = y_mode
    self.pdf_arrayDF = cd.datastruct(pdf_array, index=index)
    self.u_arrayDF = cd.datastruct(u_array, index=index)
    self.v_arrayDF = cd.datastruct(v_array, index=index)
def _autocov_extract(self, comp1, comp2, path_to_folder='.', time0=None,
                     abs_path=True, max_x_sep=None, max_z_sep=None):
    """Compute streamwise (x) and spanwise (z) autocovariance for the tg
    case, stacking the per-time results before reshaping.

    Parameters
    ----------
    comp1, comp2 : velocity components to correlate.
    max_x_sep, max_z_sep : maximum separations; default to half the domain
        in the respective direction.

    Raises
    ------
    ValueError : when a requested separation exceeds the domain size.

    Side effects: sets ``self._meta_data``, ``self.comp``, ``self.NCL``,
    ``self._avg_data`` and ``self.autocorrDF``.
    """
    times = misc_utils.time_extract(path_to_folder, abs_path)
    if time0 is not None:
        times = list(filter(lambda x: x > time0, times))

    if cp.rcParams['TEST']:
        # test mode: only the three latest times
        times.sort()
        times = times[-3:]

    self._meta_data = self._module.CHAPSim_meta(path_to_folder)
    self.comp = (comp1, comp2)
    self.NCL = self._meta_data.NCL
    self._avg_data = self._module._avg_tg_base_class(
        times, meta_data=self._meta_data, path_to_folder=path_to_folder,
        time0=time0, abs_path=abs_path)

    # FIX: removed malformed ANSI escape "\033[1;32 " (missing terminating
    # 'm') from the error messages.
    if max_z_sep is None:
        max_z_sep = int(self.NCL[2] / 2)
    elif max_z_sep > self.NCL[2]:
        raise ValueError(
            "Variable max_z_sep must be less than half NCL3 in readdata file\n")

    if max_x_sep is None:
        # FIX: was self._NCL[0] — AttributeError; the attribute is self.NCL
        max_x_sep = int(self.NCL[0] / 2)
    elif max_x_sep > self.NCL[0]:
        # FIX: message previously named NCL3 instead of NCL1
        raise ValueError(
            "Variable max_x_sep must be less than half NCL1 in readdata file\n")

    shape_x = (max_x_sep, self.NCL[1], len(times))
    shape_z = (max_z_sep, self.NCL[1], len(times))

    # FIX: replaced the fragile `'R_z' not in locals()` test with None sentinels
    R_z = None
    R_x = None
    for timing in times:
        fluct_data = self._module._fluct_tg_class(
            timing, self._avg_data, path_to_folder=path_to_folder,
            abs_path=abs_path)

        local_R_z, local_R_x = self._autocov_calc(fluct_data, comp1, comp2,
                                                  timing, max_x_sep, max_z_sep)
        if R_z is None:
            R_z, R_x = local_R_z, local_R_x
        else:
            R_z = np.vstack([R_z, local_R_z])
            R_x = np.vstack([R_x, local_R_x])
        gc.collect()

    R_z = R_z.T.reshape(shape_z)
    R_x = R_x.T.reshape(shape_x)
    self.autocorrDF = cd.datastruct({'x': R_x, 'z': R_z})
def _autocov_extract(self, comp1, comp2, path_to_folder=".", time0=None,
                     abs_path=True, max_x_sep=None, max_z_sep=None):
    """Compute streamwise (x) and spanwise (z) autocovariance for the io
    case, maintaining a running time average of the per-time results.

    Parameters
    ----------
    comp1, comp2 : velocity components to correlate.
    max_x_sep, max_z_sep : maximum separations; default to half the domain
        in the respective direction.

    Raises
    ------
    ValueError : when a requested separation exceeds the domain size, or
        when per-time result shapes disagree.

    Side effects: sets ``self._meta_data``, ``self.comp``, ``self._avg_data``
    and ``self.autocorrDF``.
    """
    times = misc_utils.time_extract(path_to_folder, abs_path)
    if time0 is not None:
        times = list(filter(lambda x: x > time0, times))

    if cp.rcParams['TEST']:
        # test mode: only the five latest times
        times.sort()
        times = times[-5:]

    self._meta_data = self._module._meta_class(path_to_folder)
    self.comp = (comp1, comp2)

    try:
        self._avg_data = self._module._avg_io_class(
            max(times), self._meta_data, path_to_folder, time0, abs_path)
    except Exception:
        # the latest time may be unreadable (e.g. partially written); drop it
        # and retry. FIX: abs_path was silently dropped on the retry call.
        times.remove(max(times))
        self._avg_data = self._module._avg_io_class(
            max(times), self._meta_data, path_to_folder, time0, abs_path)

    # NOTE(review): self.NCL is not assigned in this method — presumably a
    # property or set elsewhere on the class; confirm.
    if max_z_sep is None:
        max_z_sep = int(self.NCL[2] * 0.5)
    elif max_z_sep > self.NCL[2]:
        raise ValueError(
            "Variable max_z_sep must be less than half NCL3 in readdata file\n")

    if max_x_sep is None:
        max_x_sep = int(self.NCL[0] * 0.5)
    elif max_x_sep > self.NCL[0]:
        # FIX: message previously named NCL3 instead of NCL1
        raise ValueError(
            "Variable max_x_sep must be less than half NCL1 in readdata file\n")

    R_x = None
    R_z = None
    for i, timing in enumerate(times):
        fluct_data = self._module._fluct_io_class(
            timing, self._avg_data, time0=time0,
            path_to_folder=path_to_folder, abs_path=abs_path)

        if i == 0:
            R_x, R_z = self._autocov_calc(fluct_data, comp1, comp2, timing,
                                          max_x_sep, max_z_sep)
        else:
            local_R_x, local_R_z = self._autocov_calc(
                fluct_data, comp1, comp2, timing, max_x_sep, max_z_sep)
            if R_x.shape != local_R_x.shape or R_z.shape != local_R_z.shape:
                msg = ("There is a problem. the shapes of the local and "
                       "averaged array are different")
                raise ValueError(msg)
            # incremental (running) mean over the i+1 times processed so far
            coe_old = i / (i + 1)
            coe_new = 1 / (i + 1)
            R_x = R_x * coe_old + local_R_x * coe_new
            R_z = R_z * coe_old + local_R_z * coe_new

    if cp.rcParams['SymmetryAVG'] and self.metaDF['iCase'] == 1:
        # symmetrise about the channel centreline; each 'v' in the pair
        # flips sign under reflection
        vy_count = comp1.count('v') + comp2.count('v')
        R_x = 0.5 * (R_x + R_x[:, ::-1] * (-1)**vy_count)
        R_z = 0.5 * (R_z + R_z[:, ::-1] * (-1)**vy_count)

    self.autocorrDF = cd.datastruct({'x': R_x, 'z': R_z})