def _lattice(infname, lattice):
    """!
    Show the hopping matrix as a 3D grid.
    """
    getLogger("isle.show").info("Showing lattice in file %s", infname)

    fig = plt.figure(figsize=(10, 10))
    fig.canvas.set_window_title(f"Isle Lattice - {infname}")
    ax = fig.add_subplot(111, projection="3d")
    ax.set_title(lattice.name)

    # draw edges
    hopping = lattice.hopping()
    maxHopping = np.max(isle.Matrix(hopping))
    for i in range(lattice.nx() - 1):
        for j in range(i + 1, lattice.nx()):
            if lattice.areNeighbors(i, j):
                ax.plot(*zip(lattice.position(i), lattice.position(j)),
                        color=cm.viridis_r(hopping[i, j] / maxHopping))

    # an x marks the center
    center = sum(np.array(lattice.position(i)) for i in range(lattice.nx())) / lattice.nx()
    ax.scatter((center[0], ), (center[1], ), marker="x", c="k")

    # make background white
    ax.w_xaxis.set_pane_color((1.0, 1.0, 1.0, 1.0))
    ax.w_yaxis.set_pane_color((1.0, 1.0, 1.0, 1.0))
    ax.w_zaxis.set_pane_color((1.0, 1.0, 1.0, 1.0))

    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.set_zlabel("z")
def color_for_value(value):
    def scale(x):
        return x  # identity for now, or use np.log

    vmin = scale(min(df['value']))
    vmax = scale(max(df['value']))
    proportion = scale((value - vmin) / (vmax - vmin))
    return cm.viridis_r(proportion)
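# A minimal sketch of the same value-to-colour mapping using matplotlib's
# Normalize (the 'value' column and its range below are hypothetical stand-ins
# for the df assumed by color_for_value above; swap Normalize for LogNorm to
# get the np.log variant hinted at in scale()):
import numpy as np
import pandas as pd
from matplotlib import cm
from matplotlib.colors import Normalize

df_demo = pd.DataFrame({'value': np.linspace(1.0, 100.0, 50)})
norm = Normalize(vmin=df_demo['value'].min(), vmax=df_demo['value'].max())
rgba = cm.viridis_r(norm(42.0))  # RGBA tuple, same kind of result as color_for_value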
def plot_me(q, process):
    try:
        dat = q.get_nowait()
        if dat != 'Q':
            outname, D, zeta = dat
            print('Printing {}'.format(outname))
            #fig, ax = plt.subplots(subplot_kw={'projection': '3d'})
            #fig.set_dpi(100)
            #fig.set_size_inches(12,10)
            #fig.set_facecolor('w')
            ax = axes[process]
            print('Set axis {}'.format(process))
            ax.clear()
            ax.set_xlim(0, 10)
            ax.set_ylim(0, 10)
            ax.set_zlim(0.7, 1.4)
            #print(np.shape(X), np.shape(self.D))
            print('Plotting surface 1 on {}'.format(process))
            ax.plot_surface(X, Y, D[:, :, 1], rstride=1, cstride=2, lw=0,
                            facecolors=cm.viridis_r(zeta[:, :, 1]), antialiased=True)
            #ax.plot_wireframe(X,Y,D_2d[i,0,2:-2,2:-2].T, rstride=2, cstride=2, lw=0.1, cmap=cm.viridis, antialiased=True)
            print('Plotting surface 0 on {}'.format(process))
            ax.plot_surface(X, Y, D[:, :, 0], rstride=1, cstride=2, lw=0,
                            facecolors=cm.viridis_r(zeta[:, :, 0]), antialiased=True)
            canvases[process].draw()
            print('Saving {}'.format(outname))
            plt.savefig(outname)
            print('Saved {}'.format(outname))
            windows[process].after(10, plot_me, q, process)
        else:
            print('done?')
    except:
        print('empty :(')
        windows[process].after(10, plot_me, q, process)
def exittime_distribution(u0s=np.linspace(0.1, 0.5, 5)):
    fig, ax = plt.subplots(figsize=(4, 2))
    times = np.logspace(-4, 4, 200)
    du = 1
    D = 1
    for u0 in u0s:
        timefac = u0**2 * (1 - u0)**2  # since we sample around the average value (see paper)
        plt.plot(np.log10(times * timefac),
                 times * timefac * np.nan_to_num(
                     st.distribution_exittimes(times * timefac, u0, du, D, N=201)),
                 label=f'$u_0$=${u0:0.02}$',
                 c=cm.viridis_r(1.5 * u0))
    plt.ylim(0)
    plt.legend()
    ax.set_xlim(-3.5, 1.5)
    ax.set_xlabel(r'$\mathrm{Survival\ time\ }\log_{10}T_{\mathrm{surv}}/T_0$')
    ax.set_ylabel(r'$\mathrm{d}P/\mathrm{d}(\log_{10}T_{\mathrm{surv}}/T_0)$')
    return fig
for names in fileNameFits:
    pathNameFits.append(folderFits + names)

# load data
dataFits = []
for names in pathNameFits:
    dataFits.append(np.loadtxt(names, delimiter=" ", skiprows=1))

viridis_light = [
    "#bef28d", "#67f5a8", "#34cfc7", "#59b3ff", "#9b85ff", "#b400e6"
]

# plotting the data
for i, sets in enumerate(data):
    c = cm.viridis_r((i + 1) / 6., 1)  # set colour from colourmap
    c_l = viridis_light[i]
    logical_reg = (dataFits[i][:, 0] < 0.04)
    plt.loglog(sets[:, 0], sets[:, 1] * np.sqrt(10)**i, 'o',
               color=c, label=sample[i] + ' wt%')
    plt.plot(dataFits[i][~logical_reg, 0],
             dataFits[i][~logical_reg, 1] * np.sqrt(10)**i, '-',
             color=c_l, alpha=1)
    plt.plot(dataFits[i][logical_reg, 0],
             dataFits[i][logical_reg, 1] * np.sqrt(10)**i,
             linestyle='--',
print(bins)

ranges = [10, 60, 90, 180]
outrange = [60, 90, 180]
# # # ranges = [15, 30, 60, 202]
# outrange = [ 30, 60, 202]

fig = plt.figure(figsize=(8, 5), dpi=400)
cc = 0.8
width = 0.7 * (bins[1] - bins[0])
center = (bins[:-1] + bins[1:]) / 2

ax1 = fig.add_subplot(111)

colors = cm.viridis_r(np.linspace(0, 1, len(outrange)))

for id, r in enumerate(ranges):
    if id == 0:
        continue
    c = colors[id - 1]
    start = ranges[id - 1]
    t = tmin[(scales <= r) & (scales > ranges[id - 1])]
    p = psum[(scales <= r) & (scales > ranges[id - 1])]
    to30 = t[p > 30]
    t = t[p > 1]
    # bins = np.percentile(t, np.arange(0,101,5))
        continue
    llat = lat[(scales <= r) & (scales > ranges[id - 1])]
    t = np.concatenate(tmin[(scales <= r) & (scales > ranges[id - 1])])
    print('Number valid pixel',
          np.sum(np.isfinite(np.concatenate(tmin2[(scales <= r) & (scales > ranges[id - 1])]))))
    dic[r] = t
    dic_l[r] = llat

f = plt.figure(figsize=(10, 4), dpi=300)
# ax = f.add_subplot(131)
# colors = cm.viridis_r(np.linspace(0,1,len(outrange)))
#
# for id,k in enumerate(outrange):  #[::-1]
#     c = colors[id]
#     hist, h = np.histogram(dic[k], bins=np.arange(-90,-44,3), range=(-90,-45))  # weights=weights,
#
#     ax.plot(h[1::]-0.5, hist, color=c, lw=2, label=str(ranges[id])+'-'+str(k) + ' km', marker='o')
# plt.legend(fontsize=7)
# plt.ylabel('Frequency of T(power maximum)')
# plt.xlabel('Tmin per circle')
# plt.title('Sub-system temperature minima, >15000km2')

linestyles = [':', '--', '-']

ax = f.add_subplot(121)

for id, k in enumerate(outrange):  #
def run_one_step(self):
    """ """
    # find the faulted node with the largest drainage area.
    largest_da = np.max(self._model.grid.at_node['drainage_area'][
        self._model.boundary_handler['NormalFault'].faulted_nodes == True])
    largest_da_ind = np.where(
        self._model.grid.at_node['drainage_area'] == largest_da)[0][0]

    start_node = self._model.grid.at_node['flow__receiver_node'][largest_da_ind]

    (profile_IDs, dists_upstr) = analyze_channel_network_and_plot(
        self._model.grid,
        number_of_channels=1,
        starting_nodes=[start_node],
        create_plot=False)

    elevs = model.z[profile_IDs]

    self.relative_times.append(self._model.model_time / model.params['run_duration'])

    offset = np.min(elevs[0])
    max_distance = np.max(dists_upstr[0][0])
    self.channel_segments.append(
        np.array((dists_upstr[0][0], elevs[0] - offset)).T)
    self.xnormalized_segments.append(
        np.array((dists_upstr[0][0] / max_distance, elevs[0] - offset)).T)

    self.relative_times.append(self._model.model_time / model.params['run_duration'])

    colors = cm.viridis_r(self.relative_times)

    xmin = [xy.min(axis=0)[0] for xy in self.channel_segments]
    ymin = [xy.min(axis=0)[1] for xy in self.channel_segments]
    xmax = [xy.max(axis=0)[0] for xy in self.channel_segments]
    ymax = [xy.max(axis=0)[1] for xy in self.channel_segments]

    fs = (8, 6)
    fig, ax = plt.subplots(figsize=fs, dpi=300)
    ax.set_xlim(0, max(xmax))
    ax.set_ylim(0, max(ymax))
    line_segments = LineCollection(self.channel_segments, colors=colors, linewidth=0.5)
    ax.add_collection(line_segments)
    yr = str(self._model.model_time / (1e6)).zfill(4)
    plt.savefig('profile_' + yr + '.png')
    plt.close()

    fig, ax = plt.subplots(figsize=fs, dpi=300)
    ax.set_xlim(0, 1)
    ax.set_ylim(0, max(ymax))
    line_segments = LineCollection(self.xnormalized_segments, colors=colors, linewidth=0.5)
    ax.add_collection(line_segments)
    yr = str(self._model.model_time / (1e6)).zfill(4)
    plt.savefig('normalized_profile_' + yr + '.png')
    plt.close()

    plt.figure()
    plot_channels_in_map_view(self._model.grid, profile_IDs)
    plt.savefig('topography_' + yr + '.png')
    plt.close()

    plt.figure()
    imshow_grid(model.grid, model.grid.at_node['soil__depth'],
                cmap='viridis', limits=(0, 15))
    plt.savefig('soil_' + yr + '.png')
    plt.close()

    plt.figure()
    imshow_grid(self._model.grid, self._model.grid.at_node['sediment__flux'],
                cmap='viridis')
    plt.savefig('sediment_flux_' + yr + '.png')
    plt.close()

    U_eff = U_fast + U_back
    U_eff_slow = U_slow + U_back

    area = np.sort(self._model.grid.at_node['drainage_area'][
        self._model.boundary_handler['NormalFault'].faulted_nodes == True])
    area = area[area > 0]
    little_q = (area * self._model.params['runoff_rate'])**self._model.params['m_sp']
    #area_to_the_m = area ** self._model.params['m_sp']

    detachment_prediction = (
        (U_eff / (self._model.params['K_rock_sp']))**(1.0 / self._model.params['n_sp'])
        * (1.0 / little_q)**(1.0 / self._model.params['n_sp']))

    transport_prediction = (
        ((U_eff * self._model.params['v_sc']) /
         (self._model.params['K_sed_sp'] * self._model.params['runoff_rate'])) +
        ((U_eff) / (self._model.params['K_sed_sp'])))**(1.0 / self._model.params['n_sp']) * (
            (1.0 / little_q)**(1.0 / self._model.params['n_sp']))

    space_prediction = (
        ((U_eff * self._model.params['v_sc']) * (1.0 - Ff) /
         (self._model.params['K_sed_sp'] * self._model.params['runoff_rate'])) +
        ((U_eff) / (self._model.params['K_rock_sp'])))**(1.0 / self._model.params['n_sp']) * (
            (1.0 / little_q)**(1.0 / self._model.params['n_sp']))

    detachment_prediction_slow = (
        (U_eff_slow / (self._model.params['K_rock_sp']))**(1.0 / self._model.params['n_sp'])
        * (1.0 / little_q)**(1.0 / self._model.params['n_sp']))

    transport_prediction_slow = (
        ((U_eff_slow * self._model.params['v_sc']) /
         (self._model.params['K_sed_sp'] * self._model.params['runoff_rate'])) +
        ((U_eff_slow) / (self._model.params['K_sed_sp'])))**(1.0 / self._model.params['n_sp']) * (
            (1.0 / little_q)**(1.0 / self._model.params['n_sp']))

    space_prediction_slow = (
        ((U_eff_slow * self._model.params['v_sc']) * (1.0 - Ff) /
         (self._model.params['K_sed_sp'] * self._model.params['runoff_rate'])) +
        ((U_eff_slow) / (self._model.params['K_rock_sp'])))**(1.0 / self._model.params['n_sp']) * (
            (1.0 / little_q)**(1.0 / self._model.params['n_sp']))

    # TODO need to fix space predictions here to include new soil thickness.

    fs = (8, 6)
    fig, ax = plt.subplots(figsize=fs, dpi=300)
    plt.plot(area, detachment_prediction, 'c', lw=5, label='Detachment Prediction')
    plt.plot(area, transport_prediction, 'b', label='Transport Prediction')
    plt.plot(area, space_prediction, 'm', label='Space Prediction')
    plt.plot(area, detachment_prediction_slow, 'c', lw=5, alpha=0.3)
    plt.plot(area, transport_prediction_slow, 'b', alpha=0.3)
    plt.plot(area, space_prediction_slow, 'm', alpha=0.3)
    plt.plot(self._model.grid.at_node['drainage_area'][
                 self._model.boundary_handler['NormalFault'].faulted_nodes == True],
             self._model.grid.at_node['topographic__steepest_slope'][
                 self._model.boundary_handler['NormalFault'].faulted_nodes == True],
             'k.', label='Fault Block Nodes')
    plt.plot(self._model.grid.at_node['drainage_area'][
                 self._model.boundary_handler['NormalFault'].faulted_nodes == False],
             self._model.grid.at_node['topographic__steepest_slope'][
                 self._model.boundary_handler['NormalFault'].faulted_nodes == False],
             'r.', label='Unfaulted Nodes')
    plt.plot(self._model.grid.at_node['drainage_area'][profile_IDs],
             self._model.grid.at_node['topographic__steepest_slope'][profile_IDs],
             'g.', label='Main Channel Nodes')
    plt.legend()
    ax.set_xscale('log')
    ax.set_yscale('log')
    plt.xlabel('log 10 Area')
    plt.ylabel('log 10 Slope')
    plt.savefig('slope_area_' + yr + '.png')
    plt.close()
def probability(precip=None, thresh=None):
    if thresh is None:
        thresh = 30

    fpath = '/users/global/cornkle/C_paper/wavelet/figs/paper/'
    path = '/users/global/cornkle/C_paper/wavelet/saves/pandas/'
    # path = 'D://data/wavelet/saves/pandas/'
    dic = pkl.load(open(path + '3dmax_gt15000_lax_nonan_dominant.p', 'rb'))  #noR lax_nonan

    scales = np.array(dic['scale'])
    uids, uinds = np.unique(dic['id'], return_index=True)
    print(np.percentile(scales, np.arange(0, 101, 20)))

    if precip is None:
        precip = 'circle_p'

    psum = np.array(dic[precip])
    tmin = np.array(dic['circle_t'])
    pcsum = np.array(dic['circle_p'])

    pp = np.concatenate(psum)
    tt = np.concatenate(tmin)
    pall_g30 = np.sum(pp > thresh)
    pp15 = np.concatenate(psum[(scales <= 35)])
    pt15 = (pp[tt <= -70])

    print('Percentage >30 from pp>=8', pall_g30 / np.sum(pp >= 8))
    print('Nb 30mm identified', pall_g30)
    print('Nb 30mm identified lt 35km', np.sum(pp15 >= thresh))
    print('Nb 30mm identified lt 35km to identified', np.sum(pp15 >= thresh) / pall_g30)
    print('Nb 30mm pixel identified lt -70 to identified', np.sum(pt15 >= thresh) / pall_g30)

    tconv = np.concatenate(tmin)
    pconv = np.concatenate(psum)
    pconv2 = np.concatenate(pcsum)
    print('Convective fraction <-80, all scales',
          np.sum((tconv <= -80) & (pconv >= 8)) / np.sum((tconv <= -80) & (pconv2 >= 0)))

    tconv = np.concatenate(tmin[(scales <= 20)])
    pconv = np.concatenate(psum[(scales <= 20)])
    pconv2 = np.concatenate(pcsum[(scales <= 20)])
    print('Convective fraction <-80',
          np.sum((tconv <= -80) & (pconv >= 8)) / np.sum((tconv <= -80) & (pconv2 >= 0)))
    print('Convective fraction <-90',
          np.sum((tconv <= -87) & (pconv >= 8)) / np.sum((tconv <= -87) & (pconv2 >= 0)))
    print('Convective fraction <-50',
          np.sum((tconv <= -50) & (pconv >= 8)) / np.sum((tconv <= -50) & (pconv2 >= 0)))
    print('Extreme fraction <-50',
          np.sum((tconv <= -50) & (pconv2 >= 30)) / np.sum((tconv <= -50) & (pconv2 >= 0)))

    bins = np.array(list(range(-95, -44, 5)))  # compute probability per temperature range (1degC)
    print(bins)

    ranges = [10, 35, 90, 180]
    outrange = [35, 90, 180]

    fig = plt.figure(figsize=(15, 5), dpi=400)
    cc = 0.8
    width = 0.7 * (bins[1] - bins[0])
    center = (bins[:-1] + bins[1:]) / 2

    ax1 = fig.add_subplot(131)
    ax2 = fig.add_subplot(132)
    ax3 = fig.add_subplot(133)

    colors = cm.viridis_r(np.linspace(0, 1, len(outrange)))

    hh1 = []
    hh2 = []
    low = []
    up = []

    for id, r in enumerate(ranges):
        if id == 0:
            continue
        c = colors[id - 1]
        start = ranges[id - 1]

        t = np.concatenate(tmin[(scales <= r) & (scales > ranges[id - 1])])
        p = np.concatenate(psum[(scales <= r) & (scales > ranges[id - 1])])
        pp = np.concatenate(pcsum[(scales <= r) & (scales > ranges[id - 1])])

        to30 = t[p >= thresh]
        t0 = t[pp >= 0]

        H1, bins1 = np.histogram(to30, bins=bins, range=(-95, -45))
        H, bins = np.histogram(t0, bins=bins, range=(-95, -45))
        H = H.astype(float)
        H1 = H1.astype(float)
        H[H < 30] = np.nan

        histo = H1 / H * 100.
        lower, upper = stats.proportion_confint(H1, H)

        ax1.plot(center, histo, color=c, linewidth=1.5,
                 label=str(start) + '-' + str(r) + ' km', marker='o')
        ax1.set_title('Probability Precip>30mm')
        ax1.fill_between(center, lower * 100, upper * 100, color=c, alpha=0.3)

        ax2.plot(center, H, color=c, linewidth=1.5,
                 label=str(start) + '-' + str(r) + ' km', marker='o')
        ax3.set_title('Number of rainfall pixel >30mm (nP)')
        #ax2.set_ylim(0,160)

        ax3.plot(center, H1, color=c, linewidth=1.5,
                 label=str(start) + '-' + str(r) + ' km', marker='o')
        ax3.set_title('Number of rainfall pixel >30mm (nP)')

        hh1.append(H1)
        hh2.append(H)
        low.append(lower)
        up.append(upper)

    ax1.set_xlabel('Min. Temperature (5 $^{\degree}C$ bins)')
    ax1.set_ylabel('Probability (% | Max. precip $>$ 30 $mm\ h^{-1}$)')
    plt.text(0.03, 0.9, 'b', transform=ax1.transAxes, fontsize=20)
    plt.legend()
    plt.tight_layout()
    plt.savefig(fpath + 'wavelet_scale_p_T_lax.png')
    # plt.savefig(path + 'wavelet_scale_p_T.pdf')
    plt.close('all')

    return center, np.array(hh1), np.array(hh2), low, up
def plot():
    fpath = '/users/global/cornkle/C_paper/wavelet/figs/paper/'
    path = '/users/global/cornkle/C_paper/wavelet/saves/pandas/'
    # path = 'D://data/wavelet/saves/pandas/'
    # fpath = 'D://data/wavelet/saves/pandas/'

    x, y1, y2, l, u = probability('circle_p', 30)
    xx, yy1, yy2, ll, uu = probability('circle_pc', 8)

    ranges = ['15-35', '35-90', '90-180']

    f = plt.figure(figsize=(11, 4), dpi=300)
    ax1 = f.add_subplot(121)
    ax2 = f.add_subplot(122)

    colors = cm.viridis_r(np.linspace(0, 1, len(ranges)))
    colors = [':', '--', '-']
    ccolors = ['lightsteelblue', 'seagreen', 'grey']
    #prob2 = pkl.load(open(fpath+"tonly_prob2.p", "rb"))

    y = y1 / y2 * 100
    yy = yy1 / y2 * 100
    # yy[0] = prob2[0]
    # # ll[0] = prob2[1]
    # # uu[0] = prob2[2]

    cnt = 0
    for yl, c, cc, rang, rl, ru in zip(yy, colors, ccolors, ranges, ll, uu):
        rl = rl * 100
        ru = ru * 100
        if cnt > 0:
            rl[0:3] = rl[0:3] - 5
            yl[0:3] = yl[0:3] - 5
            ru[0:3] = ru[0:3] - 5
        ax1.plot(xx, yl, color='k', linewidth=1.5, label=rang + ' km',
                 marker='o', linestyle=c)
        ax1.fill_between(xx, rl, ru, color=cc, alpha=0.5)
        cnt = cnt + 1

    print('ratio scales', y[0] / y[1])

    for yl, c, cc, rang, rl, ru in zip(y, colors, ccolors, ranges, l, u):
        rl = rl * 100
        ru = ru * 100
        ax2.plot(xx, yl, color='k', linewidth=1.5, marker='o', linestyle=c)
        ax2.fill_between(xx, rl, ru, color=cc, alpha=0.5)

    ax1.set_xlabel('Pixel temperature (5 $^{\degree}C$ bins)')
    ax1.set_ylabel('Pixel probability (%)')  #| Pixel precip $>$ 30 $mm\ h^{-1}$)')
    ax1.set_ylim(-1, 90)
    ax1.legend()
    ax1.minorticks_on()

    ax2.set_xlabel('Pixel temperature (5 $^{\degree}C$ bins)')
    ax2.set_ylabel('Pixel probability (%)')  # | Max. precip $>$ 30 $mm\ h^{-1}$)')
    ax2.set_ylim(-1, 48)
    ax2.minorticks_on()

    fsiz = 14
    x = 0.02
    plt.annotate('a)', xy=(0.08, 0.87), xytext=(0, 4), size=fsiz,
                 xycoords=('figure fraction', 'figure fraction'),
                 textcoords='offset points')
    plt.annotate('b)', xy=(0.57, 0.87), xytext=(0, 4), size=fsiz,
                 xycoords=('figure fraction', 'figure fraction'),
                 textcoords='offset points')

    plt.tight_layout()
    plt.savefig(fpath + 'wavelet_scale_p_T_paper_lax_dominant.png')
    # plt.savefig(path + 'wavelet_scale_p_T.pdf')
    plt.close('all')

    print('Proportion big scale small scale: ', y[0] / y[1])
def deg_rec(filename, t, A, per_cm, init_padding, deg_padding, rec_padding,
            leakage, plot_leakage, immittance, plot_m, export_temps, plot_temps,
            init_entry, deg_entry, rec_entry, leakage_entry):
    '''
    Reads degradation and recovery impedance `.diel` file and exports Zview
    files, immittance files, leakage current, and temperature information

    Parameters
    ----------
    filename : string
        .diel file to read
    t : float
        Thickness of sample (in cm)
    A : float
        Area of sample (in cm^2)
    per_cm : bool, default False
        If True, report impedance in units of Ohm/cm and admittance in units of S/cm
    init_padding : int
        Padding of exported files.
    deg_padding : int
        Padding of exported files.
    rec_padding : int
        Padding of exported files.
    leakage : bool
        If True, then calculate leakage current at selected frequencies
    plot_leakage : bool
        If True, then plot the leakage data
    immittance : bool
        If True, then calculate immittance data
    plot_m : bool
        If True, then plot imaginary modulus
    export_temps : bool
        If True, then export temperature data
    plot_temps : bool
        If True, then plot the temperature data
    init_entry : int
        Measurement number for initial sweep. `init_entry=0` if scrpt command is `check time0`
    deg_entry : string
        Measurement numbers for degradation sweeps, separated by a space
    rec_entry : string
        Measurement numbers for recovery sweeps, separated by a space
    leakage_entry : string
        Frequencies for leakage measurements, separated by a space.
        Scientific notation (`2e6`) is okay

    Returns
    -------
    Various exported spreadsheets and plots, depending on inputs.
    '''
    s, m = getdata(filename)

    deg_suffix = [int(x) for x in deg_entry.split()]
    rec_suffix = [int(x) for x in rec_entry.split()]

    try:
        init_time = m['REAL TIME{}'.format(init_entry)][0]
    except KeyError:
        init_time = 0.0
    try:
        init_temp = m['REAL TEMPERATURE{}'.format(init_entry)][0]
    except KeyError:
        init_temp = -999
    init_sweep = m['LIST_REAL_CMPLX SWEEPFREQ RX SWEEPDATA 1']

    deg_time_keys = ['REAL TIME{}'.format(x) for x in deg_suffix]
    deg_temp_keys = ['REAL TEMPERATURE{}'.format(x) for x in deg_suffix]
    rec_time_keys = ['REAL TIME{}'.format(x) for x in rec_suffix]
    rec_temp_keys = ['REAL TEMPERATURE{}'.format(x) for x in rec_suffix]
    leakage_freqs = [float(x) for x in leakage_entry.split()]

    deg = {
        'time_keys': deg_time_keys,
        'temp_keys': deg_temp_keys,
        'times': [],
        'temps': [],
        'sweeps': []
    }
    rec = {
        'time_keys': rec_time_keys,
        'temp_keys': rec_temp_keys,
        'times': [],
        'temps': [],
        'sweeps': []
    }

    for d in [deg, rec]:
        for i in range(len(d['time_keys'])):
            try:
                d['times'] += m[d['time_keys'][i]]
            except KeyError:
                pass
            try:
                d['temps'] += m[d['temp_keys'][i]]
            except KeyError:
                pass

    if init_time > 0:
        c = 2
    else:
        c = 1

    deg['times'] = np.array(deg['times']) - init_time
    try:
        rec['times'] = np.array(rec['times']) - init_time - deg['times'][-1]
    except IndexError:
        rec['times'] = np.array(rec['times']) - init_time

    for i in range(deg['times'].size):
        try:
            deg['sweeps'].append(
                m['LIST_REAL_CMPLX SWEEPFREQ RX SWEEPDATA {}'.format(c)])
        except KeyError:
            break
        c += 1
    for i in range(rec['times'].size):
        try:
            rec['sweeps'].append(
                m['LIST_REAL_CMPLX SWEEPFREQ RX SWEEPDATA {}'.format(c)])
        except KeyError:
            break
        c += 1

    # Save all sweep data
    np.savetxt(filename[:-5] + '_' + '0'.zfill(init_padding) + '_initial.csv',
               init_sweep, delimiter=',')
    for i, data in enumerate(deg['sweeps']):
        np.savetxt(filename[:-5] + '_deg_' +
                   str(int(deg['times'][i])).zfill(deg_padding) + '.csv',
                   data, delimiter=',')
    for i, data in enumerate(rec['sweeps']):
        np.savetxt(filename[:-5] + '_rec_' +
                   str(int(rec['times'][i])).zfill(rec_padding) + '.csv',
                   data, delimiter=',')

    # Save all immittance data
    if immittance is True:
        # make a list of all .csv files in folder
        rawfile = glob.glob('.\*.csv')
        filenames = [x[2:] for x in rawfile]
        zview_to_immittance(filenames, A, t, per_cm)

    # Plot modulus data
    if plot_m is True:
        # Read modulus, frequency, and time
        deg_immittance = {}
        rec_immittance = {}
        init_immittance = np.array([])
        i_files = glob.glob('immittance\{}*.csv'.format(filename[:-5]))
        for name in i_files:
            for p in name.split('_'):
                try:
                    ti = int(p)
                except ValueError:
                    pass
            if name.find('_deg_') >= 0:
                deg_immittance[ti] = np.loadtxt(name, delimiter=',',
                                                skiprows=1, usecols=(0, 7))
            elif name.find('_rec_') >= 0:
                rec_immittance[ti] = np.loadtxt(name, delimiter=',',
                                                skiprows=1, usecols=(0, 7))
            elif name.find('initial') > 0:
                init_immittance = np.loadtxt(name, delimiter=',',
                                             skiprows=1, usecols=(0, 7))

        if len(deg_immittance) > 0:
            deg_cm = cm.viridis_r(np.linspace(0, 1, len(deg_immittance)))
            plt.figure()
            plt.title('Degradation')
            plt.xlabel('Frequency (Hz)')
            plt.ylabel('M\"')
            if init_immittance.size > 0:
                plt.loglog(init_immittance[:, 0], init_immittance[:, 1],
                           'r:', label='initial')
            for i, k in enumerate(sorted(deg_immittance)):
                if i == 0 or i == len(deg_immittance) - 1:
                    plt.loglog(deg_immittance[k][:, 0], deg_immittance[k][:, 1],
                               c=deg_cm[i], label=str(k))
                else:
                    plt.loglog(deg_immittance[k][:, 0], deg_immittance[k][:, 1],
                               c=deg_cm[i])
            plt.legend()
            plt.show()

        if len(rec_immittance) > 0:
            rec_cm = cm.viridis_r(np.linspace(0, 1, len(rec_immittance)))
            plt.figure()
            plt.title('Recovery')
            plt.xlabel('Frequency (Hz)')
            plt.ylabel('M\"')
            if init_immittance.size > 0:
                plt.loglog(init_immittance[:, 0], init_immittance[:, 1],
                           'r:', label='initial')
            for i, k in enumerate(sorted(rec_immittance)):
                if i == 0 or i == len(rec_immittance) - 1:
                    plt.loglog(rec_immittance[k][:, 0], rec_immittance[k][:, 1],
                               c=rec_cm[i], label=str(k))
                else:
                    plt.loglog(rec_immittance[k][:, 0], rec_immittance[k][:, 1],
                               c=rec_cm[i])
            plt.legend()
            plt.show()

    # Save leakage current
    if leakage is True:
        close_freq = [find_nearest_index(data[:, 0], x) for x in leakage_freqs]
        if per_cm is True:
            geo = 1.0
        else:
            geo = (1.0 * t) / A
        deg_leakage = {}
        rec_leakage = {}
        l_files = glob.glob('immittance\{}*.csv'.format(filename[:-5]))
        for name in l_files:
            for p in name.split('_'):
                try:
                    ti = int(p)
                except ValueError:
                    pass
            leak_data = np.loadtxt(name, delimiter=',', skiprows=1,
                                   usecols=(2, ))[close_freq] * geo
            if name.find('_deg_') >= 0:
                deg_leakage[ti] = leak_data
            elif name.find('_rec_') >= 0:
                rec_leakage[ti] = leak_data
            elif name.find('initial') > 0:
                deg_leakage[-1] = leak_data
                rec_leakage[-1] = leak_data

        if len(deg_leakage) > 0:
            deg_leak_df = pd.DataFrame.from_dict(deg_leakage, orient='index')
            deg_leak_df.sort_index(inplace=True)
            deg_leak_df.columns = data[close_freq, 0]
            deg_leak_df.to_csv(filename[:-4] + 'leakage_deg.dat')
        if len(rec_leakage) > 0:
            rec_leak_df = pd.DataFrame.from_dict(rec_leakage, orient='index')
            rec_leak_df.sort_index(inplace=True)
            rec_leak_df.columns = data[close_freq, 0]
            rec_leak_df.to_csv(filename[:-4] + 'leakage_rec.dat')

        # Plot leakage current
        if plot_leakage is True:
            if len(deg_leakage) > 0:
                plt.figure()
                plt.title('Degradation Leakage')
                plt.xlabel('Time (s)')
                plt.ylabel('Admittance (S/cm)')
                for col in deg_leak_df.columns:
                    plt.semilogy(deg_leak_df.index, deg_leak_df[col], label=col)
                plt.legend()
                plt.show()
            if len(rec_leakage) > 0:
                plt.figure()
                plt.title('Recovery Leakage')
                plt.xlabel('Time (s)')
                plt.ylabel('Admittance (S/cm)')
                for col in rec_leak_df.columns:
                    plt.semilogy(rec_leak_df.index, rec_leak_df[col], label=col)
                plt.legend()
                plt.show()

    # Export temperature data
    if export_temps is True:
        if len(deg['temps']) > 0:
            if init_temp > -999:
                deg_temp_data = np.append(init_temp, deg['temps'])
                deg_temp_times = np.append(init_time, deg['times'])
            else:
                deg_temp_data = deg['temps']
                deg_temp_times = deg['times']
            deg_temp_stack = np.column_stack((deg_temp_times, deg_temp_data))
            np.savetxt(filename[:-4] + '_deg_temps.dat', deg_temp_stack,
                       delimiter=',', header='Time(s),Temp(C)')
        if len(rec['temps']) > 0:
            if init_temp > -999:
                rec_temp_data = np.append(init_temp, rec['temps'])
                rec_temp_times = np.append(init_time, rec['times'])
            else:
                rec_temp_data = rec['temps']
                rec_temp_times = rec['times']
            rec_temp_stack = np.column_stack((rec_temp_times, rec_temp_data))
            np.savetxt(filename[:-4] + '_rec_temps.dat', rec_temp_stack,
                       delimiter=',', header='Time(s),Temp(C)')

        if plot_temps is True:
            if len(deg['temps']) > 0:
                plt.figure()
                plt.title('Degradation Temperature')
                plt.xlabel('Time (s)')
                plt.ylabel('Temperature (deg. C)')
                plt.plot(deg_temp_stack[:, 0], deg_temp_stack[:, 1])
            if len(rec['temps']) > 0:
                plt.figure()
                plt.title('Recovery Temperature')
                plt.xlabel('Time (s)')
                plt.ylabel('Temperature (deg. C)')
                plt.plot(rec_temp_stack[:, 0], rec_temp_stack[:, 1])
def probability():
    df = pkl.load(open('/users/global/cornkle/C_paper/wavelet/saves/pandas/3dmax_gt15000_0.5.p', 'rb'))
    df2 = pkl.load(open('/users/global/cornkle/C_paper/wavelet/saves/pandas/3dmax_gt15000_0.5.p', 'rb'))

    ids = np.array(df['id'])
    scales = np.array(df['scale'])
    scales2 = np.array(df2['scale'])
    uscales = np.unique(scales)

    tmin = np.array(df['circle_Tcentre'])
    pmax = np.array(df['circle_max'])
    p = np.array(df['circle_p'])

    tmin2 = np.array(df2['circle_Tcentre'])
    pmax2 = np.array(df2['circle_max'])
    p2 = np.array(df2['circle_p'])

    ranges = np.arange(-90, -49, 10)
    #ranges=[-90,-40]
    scaler = [15, 20, 30, 40, 50, 60, 70, 80, 100, 120, 150, 205]
    #scaler = np.unique(df['scale'])

    dic = {}
    dic2 = {}
    dic3 = {}
    dic4 = {}

    f = plt.figure(figsize=(15, 8), dpi=400)
    # ax1 = f.add_subplot(221)
    # ax2 = f.add_subplot(222)
    ax3 = f.add_subplot(121)
    ax4 = f.add_subplot(122)

    colors = cm.viridis_r(np.linspace(0, 1, len(ranges)))

    for id, r in enumerate(ranges):
        if id == 0:
            continue

        filter = (tmin <= r) & (tmin > ranges[id - 1]) & (pmax > 0.1)
        filter2 = (tmin2 <= r) & (tmin2 > ranges[id - 1]) & (pmax2 > 0.1)

        sc = (scales[filter])
        sc2 = (scales2[filter2])
        pp = p[filter]
        pp2 = p2[filter2]
        #psum = psum[filter]
        #pnz = pnz[filter]

        dic[r] = []
        dic2[r] = []
        dic3[r] = []
        dic4[r] = []

        for ids, usc in enumerate(scaler):
            if ids == 0:
                continue
            ffilter = (sc <= usc) & (sc > scaler[ids - 1])
            ffilter2 = (sc2 <= usc) & (sc2 > scaler[ids - 1])

            ppf = np.concatenate(pp[ffilter])
            ppf2 = np.concatenate(pp2[ffilter2])
            #dic[r].append(np.nansum(ppf[ppf>30])/np.nansum(ppf>=0))
            #dic[r].append(np.nansum(psum[ffilter])/np.nansum(pnz[ffilter]))

            cnt = 0
            for maxi in pp[ffilter]:
                if np.nanmax(maxi) >= 30:
                    cnt += 1
            #dic[r].append(cnt/len(ffilter) )
            dic[r].append(np.nansum(ffilter))  #
            dic2[r].append(cnt / np.nansum(ffilter))
            dic3[r].append(np.nansum(ppf2 > 30) / np.nansum(ppf2 >= 0.))
            # dic[r].append(np.nanmax(ppf))
            dic4[r].append(np.percentile(ppf2[ppf2 >= 0.1], 99))

        # ax1.plot(scaler[0:-1], (dic[r]), color=colors[id], label=str(ranges[id-1])+' to '+str(r) + ' C')
        # ax2.plot(scaler[0:-1], (dic2[r]), color=colors[id], label=str(ranges[id - 1]) + ' to ' + str(r) + ' C')
        ax3.plot(scaler[0:-1], (dic3[r]), color=colors[id],
                 label=str(ranges[id - 1]) + ' to ' + str(r) + ' C')
        ax4.plot(scaler[0:-1], (dic4[r]), color=colors[id],
                 label=str(ranges[id - 1]) + ' to ' + str(r) + ' C')

    # ax1.set_xlabel('Scale (km)')
    # ax1.set_ylabel('Probability')
    # #ax1.title("Nb of circles with Rain_max > 30 / nb of circles (per scale/Trange)")
    #
    # ax2.set_xlabel('Scale (km)')
    # ax2.set_ylabel('95th Percentile')

    plt.legend()
    plt.tight_layout()
def mesh_plot(input_filename=None, filename=None, start=0):
    # the other version was really slow - this does it by hand, making a load
    # of png files then using ffmpeg to stitch them together. It finishes by
    # deleting all the pngs.

    # set defaults
    if input_filename is None:
        input_filename = 'mesh_input.txt'
    if filename is None:
        filename = '../../Documents/Work/swerve/iridis2'
    data_filename = filename + '.h5'

    # read input file
    input_file = open(input_filename, 'r')
    inputs = input_file.readlines()

    for line in inputs:
        name, *dat = line.split()
        if name == 'nx':
            nx = int(dat[0])
        elif name == 'ny':
            ny = int(dat[0])
        elif name == 'nt':
            nt = int(dat[0])
        elif name == 'nlayers':
            nlayers = int(dat[0])
        elif name == 'xmin':
            xmin = float(dat[0])
        elif name == 'xmax':
            xmax = float(dat[0])
        elif name == 'ymin':
            ymin = float(dat[0])
        elif name == 'ymax':
            ymax = float(dat[0])
        elif name == 'gamma_down':
            gamma_down = np.array([float(i) for i in dat])
            if len(gamma_down) == 4:
                gamma_up = np.reshape(gamma_down, (2, 2))
                gamma_up[0, 0] = 1. / gamma_up[0, 0]
                gamma_up[1, 1] = 1. / gamma_up[1, 1]
            else:
                n = int(np.sqrt(len(gamma_down)))
                gamma_up = np.reshape(gamma_down, (n, n))
                gamma_up = inv(gamma_up)
        elif name == 'dprint':
            dprint = int(dat[0])

    dx = (xmax - xmin) / (nx - 2)
    dy = (ymax - ymin) / (ny - 2)
    dt = 0.1 * min(dx, dy)
    input_file.close()

    # read data
    f = tb.open_file(data_filename, 'r')
    table = f.root.SwerveOutput
    D_2d = table[:, :, :, :, 0]
    Sx = table[:, :, :, :, 1]
    Sy = table[:, :, :, :, 2]
    DX = table[:, :, :, :, 3]
    v = np.sqrt(Sx**2 + Sy**2)
    heights = find_height(D_2d, Sx, Sy, gamma_up)
    #D_2d[D_2d > 1.e3] = 0.
    #D_2d = D_2d[::dprint,:,:,:]
    #print(D_2d[:,:,2:-2,2:-2])

    x = np.linspace(0, xmax, num=nx - 4, endpoint=False)
    y = np.linspace(0, ymax, num=ny - 4, endpoint=False)
    X, Y = np.meshgrid(x, y)

    fig = plt.figure(figsize=(12, 10), facecolor='w', dpi=100)
    ax = fig.gca(projection='3d')

    location = '/'.join(filename.split('/')[:-1])
    name = filename.split('/')[-1]
    #print('shapes: X {}, Y {}, D2d {}'.format(np.shape(X), np.shape(Y), np.shape(D_2d[0,2:-2,2:-2].T)))

    for i in range(start, len(D_2d[:, 0, 0, 0]) - 1):
        #if i % 10 == 0:
        print('Printing {}'.format(i))
        outname = location + '/plotting/' + name + '_' + format(i, '05') + '.png'
        ax.clear()
        ax.set_xlim(0, 10)
        ax.set_ylim(0, 10)
        #ax.set_zlim(2.24,2.3)
        for l in range(0, 2):
            face_colours = DX[i, l, 2:-2, 2:-2].T
            if abs(np.amax(face_colours)) > 0.:
                face_colours /= abs(np.amax(face_colours))
            ax.plot_surface(X, Y, heights[i, l, 2:-2, 2:-2].T,
                            rstride=1, cstride=2, lw=0, cmap=cm.viridis_r,
                            antialiased=True,
                            facecolors=cm.viridis_r(face_colours))
        plt.savefig(outname)

    # close hdf5 file
    f.close()
def quick_plot(input_filename=None, filename=None, start=0):
    # the other version was really slow - this does it by hand, making a load
    # of png files then using ffmpeg to stitch them together. It finishes by
    # deleting all the pngs.

    # set defaults
    if input_filename is None:
        input_filename = 'input_file.txt'
    if filename is None:
        filename = '../../Documents/Work/swerve/iridis2'
    data_filename = filename + '.h5'

    # read input file
    input_file = open(input_filename, 'r')
    inputs = input_file.readlines()

    for line in inputs:
        name, *dat = line.split()
        if name == 'nx':
            nx = int(dat[0])
        elif name == 'ny':
            ny = int(dat[0])
        elif name == 'nt':
            nt = int(dat[0])
        elif name == 'nlayers':
            nlayers = int(dat[0])
        elif name == 'xmin':
            xmin = float(dat[0])
        elif name == 'xmax':
            xmax = float(dat[0])
        elif name == 'ymin':
            ymin = float(dat[0])
        elif name == 'ymax':
            ymax = float(dat[0])
        elif name == 'dprint':
            dprint = int(dat[0])

    dx = (xmax - xmin) / (nx - 2)
    dy = (ymax - ymin) / (ny - 2)
    dt = 0.1 * min(dx, dy)
    input_file.close()

    # read data
    f = tb.open_file(data_filename, 'r')
    table = f.root.SwerveOutput
    D_2d = np.swapaxes(table[:, :, :, :, 0], 1, 3)
    zeta_2d = np.swapaxes(table[:, :, :, :, 3], 1, 3)
    #D_2d[D_2d > 1.e3] = 0.
    #D_2d = D_2d[::dprint,:,:,:]
    #print(D_2d[:,:,2:-2,2:-2])

    x = np.linspace(0, xmax, num=nx - 4, endpoint=False)
    y = np.linspace(0, ymax, num=ny - 4, endpoint=False)
    X, Y = np.meshgrid(x, y)

    fig = plt.figure(figsize=(12, 10), facecolor='w', dpi=100)
    ax = fig.gca(projection='3d')
    #print(np.shape(X), np.shape(Y), np.shape(D_2d[0,1,:,:].T))

    location = '/'.join(filename.split('/')[:-1])
    name = filename.split('/')[-1]

    for i in range(start, len(D_2d[:, 0, 0, 0])):
        #if i % 10 == 0:
        print('Printing {}'.format(i))
        outname = location + '/plotting/' + name + '_' + format(i, '05') + '.png'
        ax.clear()
        ax.set_xlim(0, 10)
        ax.set_ylim(0, 10)
        ax.set_zlim(0.7, 1.9)
        ax.plot_surface(X, Y, D_2d[i, 1, 2:-2, 2:-2].T, rstride=1, cstride=2,
                        lw=0, facecolors=cm.viridis_r(zeta_2d[i, 1, 2:-2, 2:-2].T),
                        antialiased=True)
        #ax.plot_wireframe(X,Y,D_2d[i,0,2:-2,2:-2].T, rstride=2, cstride=2, lw=0.1, cmap=cm.viridis, antialiased=True)
        ax.plot_surface(X, Y, D_2d[i, 0, 2:-2, 2:-2].T, rstride=1, cstride=2,
                        lw=0, facecolors=cm.viridis_r(zeta_2d[i, 0, 2:-2, 2:-2].T),
                        antialiased=True)
        plt.savefig(outname)

    # close hdf5 file
    f.close()

    # now make a video!
    """
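# The "make a video" step above is cut off; one possible ffmpeg invocation for
# stitching the numbered frames written by quick_plot into a movie is sketched
# below (frame pattern, frame rate, and output name are assumptions chosen to
# match the format(i, '05') naming used above):
import subprocess

subprocess.run([
    'ffmpeg', '-framerate', '25',
    '-i', 'plotting/iridis2_%05d.png',   # printf-style pattern for the PNG frames
    '-c:v', 'libx264', '-pix_fmt', 'yuv420p',
    'iridis2.mp4'
], check=True)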
# perform the SSA steps
Y = embed_data(data, L, K, 1)
C = make_covariance(Y, K)
PC, EOF, SV = make_pcs(C, Y)

plt.figure()
for i in range(0, 20, 2):
    if i == 0:
        O = reconstruct(PC, EOF, 0, K, 0)
        O += reconstruct(PC, EOF, 1, K, 0)
    else:
        O += reconstruct(PC, EOF, i, K, 0)
        O += reconstruct(PC, EOF, i + 1, K, 0)
    plt.plot(O, color=cm.viridis_r(i / 9.))

plt.plot(data[0], color='red', linestyle='dashed')
plt.title('first PC $group$ reconstruction')
plt.xlabel('sample number (length $N$)')
plt.ylabel('amplitude')

# ### Example 5
#
# We haven't even discussed the _M_ in MSSA yet: what can we do with multiple
# dimensions?
#
# Luckily, we've set up the definitions above to be MSSA-aware, so we can just
# throw a couple of switches and perform MSSA. Let's start with the simplest
# case: duplicating the sine wave twice as the input.

# In[ ]:

ndim = 2  # as this is SSA, we only have one dimension of data
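# A minimal sketch of the "duplicate the sine wave" input mentioned above; the
# exact array layout embed_data expects is an assumption here (one row per
# channel), and N and the period are hypothetical:
import numpy as np

N = 1000
t = np.arange(N)
sine = np.sin(2 * np.pi * t / 50.)
data = np.vstack([sine, sine])   # ndim = 2: the same series fed in twice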
for k in range(n_cycles):
    ax[1][1].plot(Qd_cycle[k], T_cycle[k], '-')

ax[0][1].legend(I_leg, ncol=2, frameon=False)
ax[1][1].legend(I_leg, ncol=2, frameon=False)
ax[0][1].set_yticks(np.arange(2, 3.51, 0.5))
ax[1][1].set_ylim([30, 80])

ax[0][1].set_xlabel('Capacity (Ah)')
ax[0][1].set_ylabel('Voltage (V)')
ax[1][1].set_xlabel('Capacity (Ah)')
ax[1][1].set_ylabel('Can temperature (°C)')

########## e-f ##########
colors = []
colors = cm.viridis_r(np.linspace(0.15, 1, 4))
colors = colors[:, 0:3]

file_list = sorted(glob.glob('2019*.csv'))

Crates = np.asarray([3.6, 4, 4.4, 4.8, 5.2, 5.6, 6, 7, 8])
currents = Crates * 1.1

overpotential = np.zeros((len(file_list), 4, 9))
R = np.zeros((2, 4, 2))

SOCs = ['20% SOC: Data', '40% SOC: Data', '60% SOC: Data', '80% SOC: Data']

for k, file in enumerate(file_list):
    # Extract data
    data = np.genfromtxt(file, skip_header=True, delimiter=',')
import pandas as pd
import numpy as np
from sklearn.preprocessing import MaxAbsScaler, MinMaxScaler, StandardScaler, RobustScaler
from sklearn.feature_selection import VarianceThreshold
from sklearn.semi_supervised import LabelSpreading
from sklearn.metrics import classification_report, confusion_matrix
from scipy import stats
import os
import natsort
import re
import matplotlib.pyplot as plt
from matplotlib import cm
from figsave import save_fig

#############################################################################################

n = 25
colors = cm.viridis_r(np.linspace(0, 1, n))


def data_set(DATA_PATH: str, zone: int, save: bool):
    feature_pb = pd.read_csv(os.path.join(
        DATA_PATH, pb_data)).set_index('pktnum').drop(columns=['EL', 'ETS', 'dR'])
    feature_vb_pb = pd.read_csv(os.path.join(
        DATA_PATH, all_data)).set_index('pktnum').drop(columns=['EL', 'ETS', 'dR'])
    feature_test = pd.read_csv(os.path.join(
        DATA_PATH, test_data)).set_index('pktnum').drop(columns=['EL', 'ETS', 'dR'])

    pd_index = feature_pb.index
    vb_index = feature_vb_pb.index
def tau_plot(self, ssa_obj, t, tau, plot_type='contour', plot_all=False):
    stime = ssa_obj.time_rec - ssa_obj.start_time
    idx_t = (np.abs(stime - t)).argmin()
    idx_tau = (np.abs(stime - tau)).argmin()
    diff = idx_tau - idx_t
    difftime = t - tau

    if plot_type == 'Average':
        fig, ax = plt.subplots()
        for i in range(len(stime) - idx_tau, 0, -4):
            idx_tau = (np.abs(stime - (stime[i] + difftime))).argmin()
            Itau = ssa_obj.intensity_vec[:, idx_tau]
            x, y = np.mean(ssa_obj.intensity_vec[:, idx_tau] / np.sum(ssa_obj.probe)), \
                   np.mean(ssa_obj.intensity_vec[:, idx_tau + diff] / np.sum(ssa_obj.probe))

    if plot_type == 'window':
        minx = 10000000
        maxx = 0
        miny = 10000000
        maxy = 0
        fig, ax = plt.subplots()
        for i in range(len(stime) - idx_tau, 0, -10):
            idx_tau = (np.abs(stime - (idx_t + i))).argmin()
            Itau = ssa_obj.intensity_vec[:, idx_tau]
            x, y = np.mean(ssa_obj.intensity_vec[:, idx_tau] / np.sum(ssa_obj.probe)), \
                   np.mean(ssa_obj.intensity_vec[:, idx_tau + diff] / np.sum(ssa_obj.probe))
            minx = min(np.min(x), minx)
            miny = min(np.min(y), miny)
            maxx = max(np.max(x), maxx)
            maxy = max(np.max(y), maxy)
            ax.scatter(x, y, zorder=3, color=cm.viridis_r(1. * i / len(stime)))

        c_map_ax = fig.add_axes([.95, 0.1, 0.1, 0.8])
        c_map_ax.axes.get_xaxis().set_visible(False)
        cbar = mpl.colorbar.ColorbarBase(c_map_ax, cmap=cm.viridis_r, orientation='vertical')
        cbar.ax.set_yticklabels(np.linspace(idx_t, stime[-1], 6).astype(int))
        cbar.ax.set_title('t')

        ax.plot([min(minx, miny), max(maxx, maxy)],
                [min(minx, miny), max(maxx, maxy)], color='red', ls='--')
        ax.set_ylabel(('<I(t=' + 't + tau' + ')>'))
        ax.set_xlabel(('<I(t=' + 't' + ')>'))
        ax.set_title(('Average I(t) vs Average I(t+tau) for tau = ' + str(diff)))

    if plot_type == 'density':
        fig, ax = plt.subplots()
        nbins = int(np.max(ssa_obj.intensity_vec / np.sum(ssa_obj.probe))) + 2
        x, y = ssa_obj.intensity_vec[:, idx_t] / np.sum(ssa_obj.probe), \
               ssa_obj.intensity_vec[:, idx_tau] / np.sum(ssa_obj.probe)
        k = kde.gaussian_kde([x, y])
        xi, yi = np.mgrid[x.min():x.max():nbins * 1j, y.min():y.max():nbins * 1j]
        zi = k(np.vstack([xi.flatten(), yi.flatten()]))
        R = pearsonr(x, y)[0]

        ax.set_title(('Density Plot' + ' R = ' + str(np.round(R, 3))))
        ax.pcolormesh(xi, yi, zi.reshape(xi.shape), shading='gouraud', cmap=plt.cm.viridis)
        ax.contour(xi, yi, zi.reshape(xi.shape))
        ax.set_ylabel(('I(t=' + str(tau) + ')'))
        ax.set_xlabel(('I(t=' + str(t) + ')'))
        fig.show()

    if plot_type == 'set_tau':
        fig, ax = plt.subplots()
        for i in range(len(stime) - diff - idx_t):
            idx_tau = (np.abs(stime - (idx_t + i))).argmin()
            plt.scatter(ssa_obj.intensity_vec[:, i] / np.sum(ssa_obj.probe),
                        ssa_obj.intensity_vec[:, i + diff] / np.sum(ssa_obj.probe),
                        c=cm.viridis(1. * i / len(stime)), alpha=.5)
        plt.ylabel('I(t + s)')
        plt.xlabel(('I(t)'))
        plt.title(('Set tau, all times s = ' + str(diff)))
        c_map_ax = fig.add_axes([.95, 0.1, 0.1, 0.8])
        c_map_ax.axes.get_xaxis().set_visible(False)
        cbar = mpl.colorbar.ColorbarBase(c_map_ax, cmap=cm.viridis, orientation='vertical')
        cbar.ax.set_yticklabels(np.linspace(idx_t, stime[-1], 6).astype(int))

    if plot_type == 'scatter':
        if not plot_all:
            plt.scatter(ssa_obj.intensity_vec[:, idx_t] / np.sum(ssa_obj.probe),
                        ssa_obj.intensity_vec[:, idx_tau] / np.sum(ssa_obj.probe))
            plt.ylabel(('I(t=' + str(tau) + ')'))
        else:
            for i in range(idx_t, len(stime)):
                idx_tau = (np.abs(stime - (idx_t + i))).argmin()
                plt.scatter(ssa_obj.intensity_vec[:, idx_t] / np.sum(ssa_obj.probe),
                            ssa_obj.intensity_vec[:, idx_tau] / np.sum(ssa_obj.probe),
                            c=cm.viridis(1. * i / len(stime)), alpha=.1)
            plt.ylabel('I(tau)')
        plt.xlabel(('I(t=' + str(t) + ')'))

    if plot_type == 'contour':
        fig, ax = plt.subplots()
        if not plot_all:
            It = ssa_obj.intensity_vec[:, idx_t]
            Itau = ssa_obj.intensity_vec[:, idx_tau]
            cov = np.cov(It, Itau)
            eigs, v = np.linalg.eig(cov)
            eigs = np.sqrt(eigs)
            plt.ylabel(('I(t=' + str(tau) + ')'))

            colors = [cm.viridis(1.0), cm.viridis(.5), cm.viridis(0.0), cm.viridis(0.0)]
            for j in range(3, 0, -1):
                ell_artist = Ellipse(xy=(np.mean(It), np.mean(Itau)),
                                     width=eigs[0] * j * 2,
                                     height=eigs[1] * j * 2,
                                     angle=np.rad2deg(np.arccos(v[0, 0])))
                ell_artist.set_linewidth(2)
                ell_artist.set_edgecolor(colors[j - 1])
                ell_artist.set_color(colors[j - 1])
                ax.add_patch(ell_artist)
            ax.autoscale()
            ax.set_xlim(0)
            ax.set_ylim(0)
            ax.scatter(It, Itau, zorder=3, alpha=0.3, color='red', marker='.')
            fig.show()
        else:
            plt.ylabel('I(tau)')
            It = ssa_obj.intensity_vec[:, idx_t]
            for i in range(len(stime) - idx_t, 0, -10):
                idx_tau = (np.abs(stime - (idx_t + i))).argmin()
                Itau = ssa_obj.intensity_vec[:, idx_tau]
                cov = np.cov(It, Itau)
                eigs, v = np.linalg.eig(cov)
                eigs = np.sqrt(eigs)
                j = 3
                ell_artist = Ellipse(xy=(np.mean(It), np.mean(Itau)),
                                     width=eigs[0] * j * 2,
                                     height=eigs[1] * j * 2,
                                     angle=np.rad2deg(np.arccos(v[0, 0])))
                ell_artist.set_linewidth(2)
                ell_artist.set_edgecolor(cm.viridis_r(1. * i / len(stime)))
                ell_artist.set_color(cm.viridis_r(1. * i / len(stime)))
                ax.autoscale()
                ax.add_patch(ell_artist)
                ax.figure.canvas.draw()
            plt.xlabel(('I(t=' + str(t) + ')'))
            ax.set_xlim(0)
            ax.set_ylim(0)
            c_map_ax = fig.add_axes([.95, 0.1, 0.1, 0.8])
            c_map_ax.axes.get_xaxis().set_visible(False)
            cbar = mpl.colorbar.ColorbarBase(c_map_ax, cmap=cm.viridis_r, orientation='vertical')
            cbar.ax.set_yticklabels(np.linspace(idx_t, stime[-1], 6).astype(int))
            fig.show()
for key in list(data.keys()):
    if key[key.find('-') + 1:] == args.epochs:
        keys.append(key[:key.find('-')])
        for metric in metrics:
            metrics[metric].append(data[key][metric])

for metric in metrics:
    metrics[metric] = np.array(metrics[metric])

colors = []
n_cols = 6  #len(metrics) * 2
for i in range(n_cols):
    if i < n_cols / 2:
        colors.append(cm.plasma(i / (n_cols / 2)))
    else:
        colors.append(cm.viridis_r(i / (n_cols / 2) - 1))
colors = [colors[i*2] for i in range(int(n_cols/2))] + \
         [colors[i*2+1] for i in range(int(n_cols/2))]
# colors = colors[2:] + colors[:2]

fontsize = 36
markersize = 18
figsizescaler = 1.7

fig = plt.figure(figsize=(figsizescaler * (len(keys)), 10))
ax = fig.add_subplot(1, 1, 1)

transparency = [1, 1, 1,
                0, 1, 1]

# plot = 0
# for metric in metrics:
#     ax.plot(metrics[metric][:,0], label=metric+'-Old',
df['Tests per 1M Pop'] = df['Tests per 1M Pop'].str.replace(",", "")
df["Tests per 1M Pop"] = pd.to_numeric(df["Tests per 1M Pop"], downcast="float")
df = df.fillna(0)
df.at[13, 'TotalRecovered'] = 344

# data analysis
df['Positivepercentage'] = ((df['TotalCases']) / df['TotalTests']) * 100
df['Positivepercentage'] = df['Positivepercentage'].replace(np.inf, 0)
df['RecoveredPercentage'] = ((df['TotalRecovered']) / df['TotalCases']) * 100
df['DeathPercentage'] = ((df['TotalDeaths']) / df['TotalCases']) * 100
df = df.sort_values('TotalCases', ascending=False)
df1 = df.loc[df["TotalCases"] > 1000]

d_color = cm.viridis_r(np.linspace(.4, .8, 30))
r_color = cm.magma_r(np.linspace(.4, .8, 30))
c_color = cm.inferno_r(np.linspace(.4, .8, 30))

# Bar chart
df1.groupby("Country,Other").DeathPercentage.max().sort_values(
    ascending=False)[:25].plot.bar(color=d_color)
plt.title(
    "Top 25 countries with highest Death Percentage whose total cases are greater than 1000"
)
plt.xlabel("Country")
plt.ylabel("Death Percentage")
plt.show()

df1.groupby("Country,Other").RecoveredPercentage.max().sort_values(
    ascending=False).tail(25).plot.bar(color=r_color)