def int_thickness_calc(self, PhyTime=None):
    """Compute integral thicknesses of the perturbation velocity profile.

    Integrates the lower half of the channel (up to the centreline index)
    at each streamwise station downstream of ``location_start_end[0]``.

    Parameters
    ----------
    PhyTime : optional
        Physical time to extract; validated/defaulted by
        ``check_PhyTime`` on the averaged data.

    Returns
    -------
    tuple of numpy.ndarray
        ``(disp_thickness, mom_thickness, shape_factor)`` — displacement
        thickness, momentum thickness, and their ratio H, one value per
        retained x station.
    """
    PhyTime = self.__avg_data.check_PhyTime(PhyTime)

    # Perturbation velocity profile (columns already start at the
    # perturbation location — see mean_velo_peturb_calc).
    mean_velo = self.mean_velo_peturb_calc('u', PhyTime)

    start = self._meta_data.metaDF['location_start_end'][0]
    x_loc = indexing.coord_index_calc(self.__avg_data.CoordDF, 'x', start)[0] + 1

    y_coords = self.__avg_data.CoordDF['y']
    # Channel centreline index: integrate over the lower half only.
    U0_index = int(self.__avg_data.shape[0] * 0.5)
    half_y = y_coords[:U0_index]

    n_x = self.__avg_data.shape[1] - x_loc
    mom_thickness = np.zeros(n_x)
    disp_thickness = np.zeros(n_x)

    # Integrands over the lower half-channel.  (The original pre-allocated
    # these with np.zeros and immediately rebound them; the dead
    # allocations are removed.)
    theta_integrand = mean_velo[:U0_index] * (1 - mean_velo[:U0_index])
    delta_integrand = 1 - mean_velo[:U0_index]

    for j in range(n_x):
        mom_thickness[j] = integrate.simps(theta_integrand[:, j], half_y)
        disp_thickness[j] = integrate.simps(delta_integrand[:, j], half_y)

    shape_factor = np.divide(disp_thickness, mom_thickness)

    return disp_thickness, mom_thickness, shape_factor
def plot_shape_factor(self, PhyTime=None, fig=None, ax=None, line_kw=None, **kwargs):
    """Plot the shape factor H against streamwise distance from the
    perturbation start.

    Returns the ``(fig, ax)`` pair used for the plot.
    """
    # Shape factor is the third return of the integral-thickness calculation.
    _, _, shape_factor = self.int_thickness_calc(PhyTime)

    # Streamwise coordinates measured from the perturbation start location.
    x0 = self._meta_data.metaDF['location_start_end'][0]
    start_index = indexing.coord_index_calc(self.__avg_data.CoordDF, 'x', x0)[0] + 1
    streamwise = self._meta_data.CoordDF['x'][start_index:] - x0

    kwargs = cplt.update_subplots_kw(kwargs, figsize=[10, 5])
    fig, ax = cplt.create_fig_ax_with_squeeze(fig, ax, **kwargs)

    line_kw = cplt.update_line_kw(line_kw, label=r"$H$")
    ax.cplot(streamwise, shape_factor, **line_kw)

    ax.set_xlabel(r"$x/\delta$")
    ax.set_ylabel(r"$H$")
    ax.set_ylim([0, 2 * shape_factor[-1]])

    ax.get_gridspec().tight_layout(fig)
    return fig, ax
def plot_peturb_cf(self, PhyTime=None, wall_units=False, fig=None, ax=None, **kwargs):
    """Plot the perturbation skin-friction coefficient against streamwise
    distance from the perturbation start.

    Returns the ``(fig, ax)`` pair used for the plot.
    """
    PhyTime = self.__avg_data.check_PhyTime(PhyTime)

    tau_du = self.tau_du_calc(PhyTime)
    bulk = self.__avg_data._bulk_velo_calc(PhyTime)

    x0 = self._meta_data.metaDF['location_start_end'][0]
    i0 = indexing.coord_index_calc(self.__avg_data.CoordDF, 'x', x0)[0] + 1

    REN = self._meta_data.metaDF['REN']
    rho_star = 1.0
    # Friction coefficient normalised by the change in bulk velocity
    # relative to the initial station.
    Cf_du = tau_du[i0:] / (0.5 * REN * rho_star * (bulk[i0:] - bulk[0]) ** 2)

    streamwise = self._meta_data.CoordDF['x'][i0:] - x0

    kwargs = cplt.update_subplots_kw(kwargs, figsize=[10, 5])
    fig, ax = cplt.create_fig_ax_with_squeeze(fig, ax, **kwargs)

    ax.cplot(streamwise, Cf_du)
    ax.set_xlabel(r"$x/\delta$")
    ax.set_ylabel(r"$C_{f,du}$")
    ax.set_ylim([0, 2 * Cf_du[-1]])

    return fig, ax
def plot_mean_flow(self, x_vals, *args, relative=False, fig=None, ax=None, **kwargs):
    """Plot mean-flow profiles at the given x locations.

    With ``relative=True`` each plotted profile is shifted down by the
    moving-wall velocity at its x station so profiles are shown relative
    to the local wall.  Returns the ``(fig, ax)`` pair.
    """
    if not relative:
        return super().plot_mean_flow(x_vals, *args, fig=fig, ax=ax, **kwargs)

    fig, ax = super().plot_mean_flow(x_vals, *args, fig=fig, ax=ax, **kwargs)

    indices = indexing.coord_index_calc(self.CoordDF, 'x', x_vals)
    wall_velo = self._meta_data.wall_velocity[indices]

    # Subtract the wall velocity at each station from the matching line.
    for curve, shift in zip(ax.get_lines(), wall_velo):
        curve.set_ydata(curve.get_ydata().copy() - shift)

    # Recompute the data limits after mutating the line data.
    ax.relim()
    ax.autoscale_view()
    return fig, ax
def _plot_budget_x(self, budget_terms, y_vals_list, Y_plus=True, PhyTime=None, fig=None, ax=None, **kwargs):
    """Plot budget terms along the streamwise axis.

    Parameters
    ----------
    budget_terms : sequence or str
        Terms to plot; validated by ``_check_terms``.
    y_vals_list : list or 'max'
        Wall-normal locations at which to sample each term, or ``'max'``
        to plot each term's wall-normal maximum.
    Y_plus : bool
        Interpret ``y_vals_list`` in wall units when True.
    PhyTime : optional
        Physical time key into ``self.budgetDF``.

    Returns
    -------
    tuple
        ``(fig, ax)`` used for the plot.
    """
    budget_terms = self._check_terms(budget_terms)

    kwargs = cplt.update_subplots_kw(kwargs, figsize=[10, 5])
    # BUG FIX: was `create_fig_ax_with_squeeze(fig, ax**kwargs)` — the
    # missing comma raised `ax` to the power `kwargs` (a TypeError at
    # runtime) instead of forwarding fig, ax and the keyword arguments.
    fig, ax = cplt.create_fig_ax_with_squeeze(fig, ax, **kwargs)

    xaxis_vals = self.avg_data._return_xaxis()

    for comp in budget_terms:
        if y_vals_list != 'max':
            if Y_plus:
                # NOTE(review): `self` is passed where the non-Y_plus branch
                # passes no such object — confirm the expected first
                # argument (possibly self.avg_data).
                y_index = indexing.Y_plus_index_calc(
                    self, self.CoordDF, y_vals_list)
            else:
                y_index = indexing.coord_index_calc(
                    self.CoordDF, 'y', y_vals_list)
            budget_term = self.budgetDF[PhyTime, comp]
            y_vals_list = indexing.ycoords_from_coords(self, y_vals_list, mode='wall')[0]
            # TODO(review): y_index is computed but the plot indexes
            # budget_term by the enumeration counter — verify whether
            # budget_term[y_index[i]] was intended.
            for i, y_val in enumerate(y_vals_list):
                ax.cplot(budget_term[i], label=r"%s $y^+=%.2g$" % (comp, y_val))
            ncol = cplt.get_legend_ncols(
                len(budget_terms) * len(y_vals_list))
            ax.clegend(vertical=False, ncol=ncol, fontsize=16)
        else:
            budget_term = self.budgetDF[PhyTime, comp]
            # Wall-normal maximum of the term at every x station.
            budget_term = np.amax(budget_term, axis=0)
            ax.cplot(xaxis_vals, budget_term, label=r"maximum %s" % comp)
            ncol = cplt.get_legend_ncols(len(budget_terms))
            ax.clegend(vertical=False, ncol=ncol, fontsize=16)

    fig.tight_layout()
    return fig, ax
def plot_perturb_velo(self, x_vals, PhyTime=None, comp='u', Y_plus=False, Y_plus_max=100, fig=None, ax=None, **kwargs):
    """Plot perturbation velocity profiles at the given x locations.

    Parameters
    ----------
    x_vals : sequence of float
        Streamwise locations (absolute); shifted by the perturbation
        start before plotting.
    PhyTime : optional
        Physical time; defaults to the first time in ``flow_AVGDF``.
    comp : str
        Velocity component to plot.
    Y_plus : bool
        Plot against wall units (lower half-channel only) when True.
    Y_plus_max : float
        Upper y-axis limit in wall units.

    Returns
    -------
    tuple
        ``(fig, ax)`` used for the plot.
    """
    # BUG FIX: resolve PhyTime BEFORE computing the perturbation profile —
    # previously mean_velo_peturb_calc could receive PhyTime=None.  Also
    # test `is None` so a legitimate time of 0 is not replaced.
    if PhyTime is None:
        PhyTime = self.__avg_data.flow_AVGDF.index[0][0]

    velo_peturb = self.mean_velo_peturb_calc(comp, PhyTime)

    kwargs = cplt.update_subplots_kw(kwargs, figsize=[10, 5])
    fig, ax = cplt.create_fig_ax_with_squeeze(fig, ax, **kwargs)

    y_coord = self._meta_data.CoordDF['y']
    _, delta_v_star = self.__avg_data.wall_unit_calc(PhyTime)

    if Y_plus:
        # Lower half-channel only, converted to wall units.
        y_coord = y_coord[:int(y_coord.size / 2)]
        y_coord = (1 - np.abs(y_coord)) / delta_v_star[0]
        velo_peturb = velo_peturb[:int(y_coord.size)]
    else:
        y_max = Y_plus_max * delta_v_star[0] - 1.0

    # Shift x locations so they are measured from the perturbation start.
    start = self._meta_data.metaDF['location_start_end'][0]
    x_vals = [x - start for x in x_vals]
    x_loc = indexing.coord_index_calc(self.__avg_data.CoordDF, 'x', x_vals)

    for x, x_val in zip(x_loc, x_vals):
        label = r"$x/\delta = %.3g$" % x_val
        ax.cplot(velo_peturb[:, x], y_coord, label=label)

    ax.set_xlabel(r"$\bar{U}^{\wedge}$")
    if Y_plus:
        ax.set_ylabel(r"$y^+$")
        ax.set_ylim([0, Y_plus_max])
    else:
        ax.set_ylabel(r"$y/\delta$")
        ax.set_ylim([-1, y_max])

    ncol = cplt.get_legend_ncols(len(ax.get_lines()))
    ax.clegend(vertical=False, ncol=ncol)
    ax.get_gridspec().tight_layout(fig)
    return fig, ax
def mean_velo_peturb_calc(self, comp, PhyTime):
    """Compute the normalised perturbation velocity profile.

    The moving-wall velocity is removed from the mean profile, then each
    station at or downstream of ``location_start_end[0]`` is normalised by
    the centreline velocity change relative to the initial station:
    ``(U(y, x) - U(y, 0)) / (U_c(x) - U_c(0))``.
    """
    U_mean = self.__avg_data.flow_AVGDF[PhyTime, comp].copy()

    # Remove the local wall velocity from every wall-normal row
    # (broadcast over rows; equivalent to a row-by-row subtraction).
    U_mean -= self._meta_data.wall_velocity

    start = self._meta_data.metaDF['location_start_end'][0]
    x_loc = indexing.coord_index_calc(self.__avg_data.CoordDF, 'x', start)[0]

    centre_index = int(0.5 * self.__avg_data.shape[0])
    U_c0 = U_mean[centre_index, 0]

    # Allocate the output first (preserves the float64 result dtype),
    # then fill it with the vectorised normalisation.
    mean_velo_peturb = np.zeros(
        (self.__avg_data.shape[0], self.__avg_data.shape[1] - x_loc))
    mean_velo_peturb[:, :] = (
        (U_mean[:, x_loc:] - U_mean[:, :1])
        / (U_mean[centre_index, x_loc:] - U_c0))

    return mean_velo_peturb
def index_calc(self, comp, vals):
    """Return grid indices of *vals* along coordinate direction *comp*.

    Thin delegation to ``indexing.coord_index_calc`` with this object
    passed as the coordinate container.
    """
    return indexing.coord_index_calc(self, comp, vals)
def _extract_fluct(self, x, y, path_to_folder=None, time0=None, gridsize=200, y_mode='half-channel', use_ini=True, xy_inner=True, tgpost=False, abs_path=True):
    """Extract u'/v' fluctuation samples at (x, y) points over all output
    times and build 2-D KDE estimates of their joint PDF.

    Populates ``self._meta_data``, ``self.avg_data``, ``self._x_loc_norm``,
    ``self._y_mode`` and the datastructs ``self.pdf_arrayDF``,
    ``self.u_arrayDF`` and ``self.v_arrayDF``.  Reads simulation output
    from disk via the module's meta/avg/fluct IO classes.
    """
    # All output times, optionally restricted to those after time0.
    times = misc_utils.time_extract(path_to_folder, abs_path)
    if time0 is not None:
        times = list(filter(lambda x: x > time0, times))
    if cp.rcParams['TEST']:
        # Test mode: limit the work to the last five times.
        times.sort()
        times = times[-5:]
    self._meta_data = self._module._meta_class(path_to_folder, abs_path)
    try:
        self.avg_data = self._module._avg_io_class(max(times), self._meta_data, path_to_folder, time0, abs_path)
    except Exception:
        # Last time may be an incomplete write — drop it and retry.
        # NOTE(review): the retry omits the abs_path argument — confirm
        # whether that is intentional.
        times.remove(max(times))
        self.avg_data = self._module._avg_io_class(max(times), self._meta_data, path_to_folder, time0)
    if xy_inner:
        # x and y are paired point coordinates and must match in length.
        if len(x) != len(y):
            msg = "length of x coordinate array must be same"+\
                " as the y coord array. Lengths provided %d (x),"%len(x)+\
                " %d (y)"%len(y)
            raise ValueError(msg)
        x_coord_list = x
        y_coord_list = y
    else:
        # Otherwise take the Cartesian product of the x and y values.
        x_coord_list = []
        y_coord_list = []
        for x_val in x:
            for y_val in y:
                x_coord_list.append(x_val)
                y_coord_list.append(y_val)
    x_index = indexing.coord_index_calc(self.avg_data.CoordDF, 'x', x_coord_list)
    # Normalisation locations for y indices: the initial station when
    # use_ini is set, otherwise the requested x coordinates themselves.
    self._x_loc_norm = x_coord_list if not use_ini else [0] * len(
        y_coord_list)
    y_index = indexing.y_coord_index_norm(self.avg_data, y_coord_list, self._x_loc_norm, y_mode)
    # y_coord_index_norm returns one row per x location; the diagonal
    # pairs each y with its own x.
    y_index = np.diag(np.array(y_index))
    u_prime_array = [[] for _ in range(len(y_index))]
    v_prime_array = [[] for _ in range(len(y_index))]
    # Accumulate fluctuation samples at each point over every time.
    for time in times:
        fluct_data = self._module._fluct_io_class(time, self.avg_data, path_to_folder, abs_path)
        u_prime_data = fluct_data.fluctDF[time, 'u']
        v_prime_data = fluct_data.fluctDF[time, 'v']
        for i in range(len(y_index)):
            u_prime_array[i].extend(u_prime_data[:, y_index[i], x_index[i]])
            v_prime_array[i].extend(v_prime_data[:, y_index[i], x_index[i]])
            if cp.rcParams['SymmetryAVG'] and self.metaDF['iCase'] == 1:
                # Symmetry averaging: also sample the mirrored y location;
                # the wall-normal fluctuation changes sign under reflection.
                # NOTE(review): y_size is assigned but never used.
                y_size = u_prime_data.shape[1]
                u_prime_array[i].extend(u_prime_data[:, -1 - y_index[i], x_index[i]])
                v_prime_array[i].extend(
                    -1 * v_prime_data[:, -1 - y_index[i], x_index[i]])
    # Free the (large) per-time snapshot arrays before the KDE pass.
    gc.collect()
    pdf_array = [[] for _ in range(len(y_index))]
    u_array = [[] for _ in range(len(y_index))]
    v_array = [[] for _ in range(len(y_index))]
    # NOTE(review): seaborn._statistics.KDE is a private seaborn API and
    # may break across seaborn versions.
    estimator = seaborn._statistics.KDE(gridsize=gridsize)
    # Note: the loop variable `y` shadows the `y` parameter (unused below).
    for i, y in enumerate(y_index):
        pdf_array[i], (u_array[i], v_array[i]) = estimator(np.array(u_prime_array[i]), np.array(v_prime_array[i]))
    # Index each PDF by its (x, y) coordinate pair.
    index = list(zip(x_coord_list, y_coord_list))
    pdf_array = np.array(pdf_array)
    u_array = np.array(u_array)
    v_array = np.array(v_array)
    self._y_mode = y_mode
    self.pdf_arrayDF = cd.datastruct(pdf_array, index=index)
    self.u_arrayDF = cd.datastruct(u_array, index=index)
    self.v_arrayDF = cd.datastruct(v_array, index=index)