def __init__(self, parent):
    # super(StocksGraphView, self).__init__(parent)
    self.fatherHandle = parent
    self.figure = plt.gcf()
    self.ax = self.figure.gca()
    self.canvas = figureCanvas(self.figure)
    self.hintText = self.ax.text(-.5, -.5, "", ha="right", va="baseline",
                                 fontdict={"size": 15})
    self.figure.canvas.mpl_connect('key_press_event', self._on_key_press)
    self.figure.canvas.mpl_connect('button_press_event', self._on_button_press)
    # figure.canvas.mpl_disconnect(figure.canvas.manager.key_press_handler_id)
    self.figure.canvas.mpl_connect('motion_notify_event', self._on_mouse_move)
    self._lines = {}
    self._hHintLine = None
    self._vHintLine = None
    self.ax.fmt_date = matplotlib.dates.DateFormatter('%Y-%m-%d')
    self.strpdate2num = matplotlib.dates.strpdate2num('%Y-%m-%d')
    plt.subplots_adjust(left=.04, bottom=.0, right=.98, top=.97,
                        wspace=.0, hspace=.0)
    plt.minorticks_on()
    self.ax.grid()
    self.ax.xaxis.set_major_formatter(
        matplotlib.dates.DateFormatter('%y\n-\n%m\n-\n%d'))
def SetAxes(legend=False):
    # plt.axhline(y=0.165, ls='-', c='k', label=r'$\Omega_{b}$/$\Omega_{M}$ (WMAP)')
    f_b = 0.164
    f_star = 0.01
    err_b = 0.004
    err_star = 0.004
    f_gas = f_b - f_star
    err_gas = np.sqrt(err_b**2 + err_star**2)

    plt.axhline(y=f_gas, ls='--', c='k', label='', zorder=-1)
    x = np.linspace(1e+13, 200e+13, 1000)
    plt.fill_between(x, y1=f_gas - err_gas, y2=f_gas + err_gas, color='k',
                     alpha=0.3, zorder=-1)
    plt.text(10e+13, f_gas + 0.005, r'f$_{gas}$', verticalalignment='bottom',
             size='large')

    plt.xlabel(r'M$_{vir}$ (M$_\odot$)', size='x-large')
    plt.ylabel(r'f$_{gas}$ ($<$ r)', size='x-large')
    plt.xscale('log')
    plt.xlim([1e+13, 2e+15])
    plt.ylim(bottom=0.03)  # the 'ymin' kwarg was removed in matplotlib 3.3
    plt.tick_params(length=10, which='major')
    plt.tick_params(length=5, which='minor')
    plt.minorticks_on()

    if legend:
        plt.legend(loc=0, prop={'size': 'large'}, markerscale=0.7, numpoints=1)
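# A hedged usage sketch for SetAxes above: `masses` and `fgas` are invented demo
# data, and numpy/matplotlib are assumed to be imported as np/plt, as the
# function itself requires.
def demo_SetAxes():
    masses = np.logspace(13.2, 15.2, 20)       # hypothetical halo masses (Msun)
    fgas = 0.15 + 0.01 * np.random.randn(20)   # hypothetical gas fractions
    plt.plot(masses, fgas, 'o', label='clusters')
    SetAxes(legend=True)
    plt.show()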
def plot(filename):
    import os
    from matplotlib.pyplot import clf, tricontour, tricontourf, \
        gca, savefig, rc, minorticks_on

    if not os.path.exists(filename):
        return -1

    rc('text', usetex=True)
    clf()
    x, y, tri, ux, uy = load_velocity(filename)
    tricontourf(x, y, tri, ux, 16)
    tricontour(x, y, tri, ux, 16, linestyles='-',
               colors='black', linewidths=0.5)
    minorticks_on()
    gca().set_aspect('equal')
    gca().tick_params(direction='out', which='both')
    gca().set_xticklabels([])
    gca().set_yticklabels([])

    name, _ = os.path.splitext(filename)
    name = os.path.basename(name)
    savefig('{0}.png'.format(name), dpi=300, bbox_inches='tight')
    savefig('{0}.pdf'.format(name), bbox_inches='tight')
def cumulative_frequency(self):
    # taken from http://stackoverflow.com/questions/15408371/cumulative-distribution-plots-python
    # flatten the array to one dimension
    data = np.ravel(self.map)
    # sort the data
    sorted_data = np.sort(data)  # Or data.sort(), if data can be modified
    x = sorted_data
    y = np.arange(sorted_data.size) / 1000.0

    # Cumulative distributions:
    plt.step(x, y)  # From 0 to the number of data points-1

    # alternatively
    cumfreqs, lowlim, binsize, extrapoints = scipy.stats.cumfreq(data, numbins=4)

    plt.title('Cumulative frequency')
    plt.xlabel('e- /pix /4.4s')
    plt.ylabel('1000 counts')
    plt.xlim(0, 100)

    # see http://matplotlib.org/examples/pylab_examples/axes_demo.html
    # this is another inset axes over the main axes
    a = plt.axes([0.4, 0.2, .4, .5])
    plt.step(x, y)
    # we want to see minor ticks in the plot, disabled by default
    plt.minorticks_on()
    # set the limits for both axes
    plt.xlim(20, 25)
    plt.ylim(104, 109)

    plt.savefig('../ScatterMap1_cumfreq.png')
    plt.savefig('../ScatterMap1_cumfreq.pdf')
    plt.show()
    # close the plot gracefully
    plt.close()
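# Side note on cumulative_frequency: dividing by 1000.0 labels the counts in
# thousands, as the ylabel says. For a conventional ECDF normalized to [0, 1],
# a minimal sketch (same np/plt imports assumed) would be:
def ecdf_sketch(data):
    sorted_data = np.sort(np.ravel(data))
    y = np.arange(1, sorted_data.size + 1) / sorted_data.size  # fraction <= x
    plt.step(sorted_data, y)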
def apcorr_plot(self):
    '''
    Creates a plot of delta_mag versus instru_mag to determine
    in which region(s) to compute zpt_off.
    '''
    counts1, counts2, insmag1, insmag2, delta_mag, vegamag = \
        self.mag_calc()
    for image in self.imlist:
        mpl.rcParams['font.family'] = 'Times New Roman'
        mpl.rcParams['font.size'] = 12.0
        mpl.rcParams['xtick.major.size'] = 10.0
        mpl.rcParams['xtick.minor.size'] = 5.0
        mpl.rcParams['ytick.major.size'] = 10.0
        mpl.rcParams['ytick.minor.size'] = 5.0
        mpl.minorticks_on()
        mpl.ion()
        mpl.title(image)
        mpl.ylabel(r'$\Delta$ mag (r=' + self.apertures[0] + ', ' +
                   self.apertures[-1] + ')')
        mpl.xlabel('-2.5 log(flux)')
        mpl.scatter(insmag1, delta_mag, s=1, c='k')
        mpl.savefig(image[:-9] + '_apcorr.png')
        left = input('left: ')      # raw_input under Python 2
        right = input('right: ')
        bottom = input('bottom: ')
        top = input('top: ')
        mpl.close()
        zpt_off_calc(left, right, top, bottom)
def plot_Nhden(elem, N, hcol, hden, bounds=False):
    for i in to_plot[elem]:
        plt.clf()
        x = np.array(hden, dtype=float)  # np.float was removed in NumPy 1.24
        y = np.array(N[i])
        # x, y, hcol = trim(x, y, hcol)
        y = hcol[0] - y
        xlims = [0.75 * np.amin(x), 1.25 * np.amax(x)]
        ylims = [0.75 * np.amin(y), 1.25 * np.amax(y)]
        try:
            if bounds:
                l = minNHI - observed[elem][i]["column"][2]
                if observed[elem][i]["column"][0] == -30.:
                    u = maxNHI
                else:
                    u = maxNHI - observed[elem][i]["column"][0]
                plt.fill([-30., 30., 30., -30.], [l, l, u, u], '0.50',
                         alpha=0.2, edgecolor='b')
                # plt.fill_between(np.arange(xlims[0], xlims[1]), lower, upper, color='0.50')
        except KeyError:
            pass
        plt.plot(x, y, color_map[i], label=ion_state(i, elem))
        plt.ylabel(r"log $N_{HI}/N_{%s}$" % (str(elem) + str(roman[i])))
        plt.xlabel("log $n_{H}$")
        plt.minorticks_on()
        makedir('hden')
        f = os.path.join(paths["plot_path"], "hden",
                         elem + roman[i] + "N_Nhden.png")
        plt.xlim([-3., 0.])
        # plt.ylim(ylims)
        plt.savefig(f)
        plt.show()
        plt.close()
def make_voronoi_intens(targetSN, w1, w2):
    """ Make image. """
    image = "collapsed_w{0}_{1}.fits".format(w1, w2)
    intens = pf.getdata(image)
    extent = calc_extent(image)
    vordata = pf.getdata("voronoi_sn{0}_w{1}_{2}.fits".format(targetSN, w1, w2))
    vordata = np.ma.array(vordata, mask=np.isnan(vordata))
    bins = np.unique(vordata)[:-1]
    combined = np.zeros_like(intens)
    combined[:] = np.nan
    for j, bin in enumerate(bins):
        idx, idy = np.where(vordata == bin)
        flux = intens[idx, idy]
        combined[idx, idy] = np.nanmean(flux)
    vmax = np.nanmedian(intens) + 4 * np.nanstd(intens)
    fig = plt.figure(1)
    plt.minorticks_on()
    make_contours()
    # origin must be "lower" (not "bottom") in current matplotlib
    plt.imshow(combined, cmap="cubehelix_r", origin="lower", vmax=vmax,
               extent=extent, vmin=0)
    plt.xlabel("X [kpc]")
    plt.ylabel("Y [kpc]")
    cbar = plt.colorbar()
    cbar.set_label("Flux [$10^{-20}$ erg s$^{-1}$ cm$^{-2}$]")
    plt.savefig("figs/intens_sn{0}.png".format(targetSN), dpi=300)
    pf.writeto("figs/intens_sn{0}.fits".format(targetSN), combined, clobber=True)
    return
def make_intens_all(w1, w2):
    fig = plt.figure(figsize=(6., 6.))
    gs = gridspec.GridSpec(1, 1)
    gs.update(left=0.13, right=0.985, bottom=0.13, top=0.988)
    ax = plt.subplot(gs[0])
    plt.minorticks_on()
    make_contours()
    labels = ["A", "B", "C", "D"]
    for i, field in enumerate(fields):
        os.chdir(os.path.join(data_dir, "combined_{0}".format(field)))
        image = "collapsed_w{0}_{1}.fits".format(w1, w2)
        intens = pf.getdata(image, verify=False)
        extent = calc_extent(image)
        extent = offset_extent(extent, field)
        plt.imshow(intens, cmap="bone", origin="lower", extent=extent,
                   vmin=-20, vmax=80)
        verts = calc_verts(intens, extent)
        path = Path(verts, [Path.MOVETO, Path.LINETO, Path.LINETO,
                            Path.LINETO, Path.CLOSEPOLY])
        patch = patches.PathPatch(path, facecolor='none', lw=2, edgecolor="r")
        ax.add_patch(patch)
        xtext, ytext = np.mean(verts[:-1], axis=0)
        plt.text(xtext - 8, ytext + 8, labels[i], color="r", fontsize=35,
                 fontweight='bold', va='top')
        # plt.hold(True) was removed in matplotlib 3.0; successive draw
        # calls overlay by default, so it is no longer needed here.
    plt.xlim(26, -38)
    plt.ylim(-32, 32)
    plt.xlabel("X [kpc]")
    plt.ylabel("Y [kpc]")
    # plt.show()
    plt.savefig(os.path.join(plots_dir, "muse_fields.eps"), dpi=60, format="eps")
    plt.savefig(os.path.join(plots_dir, "muse_fields.png"), dpi=200)
    return
def make_lick_individual(targetSN, w1, w2):
    """ Make maps for the Lick indices. """
    filename = "lick_corr_sn{0}.tsv".format(targetSN)
    binimg = pf.getdata("voronoi_sn{0}_w{1}_{2}.fits".format(targetSN, w1, w2))
    intens = "collapsed_w{0}_{1}.fits".format(w1, w2)
    extent = calc_extent(intens)
    bins = np.loadtxt(filename, usecols=(0,), dtype=str).tolist()
    bins = np.array([x.split("bin")[1] for x in bins]).astype(int)
    data = np.loadtxt(filename, usecols=np.arange(25) + 1).T
    labels = [r'Hd$_A$', r'Hd$_F$', r'CN$_1$', r'CN$_2$', r'Ca4227', r'G4300',
              r'Hg$_A$', r'Hg$_F$', r'Fe4383', r'Ca4455', r'Fe4531', r'C4668',
              r'H$_\beta$', r'Fe5015', r'Mg$_1$', r'Mg$_2$', r'Mg$_b$',
              r'Fe5270', r'Fe5335', r'Fe5406', r'Fe5709', r'Fe5782', r'Na$_D$',
              r'TiO$_1$', r'TiO$_2$']
    mag = "[mag]"
    ang = r"[\AA]"
    units = [ang, ang, mag, mag, ang, ang, ang, ang, ang, ang, ang, ang,
             ang, ang, mag, mag, ang, ang, ang, ang, ang, ang, ang, mag, mag]
    # one (vmin, vmax) pair per index
    lims = [[None, None] for _ in labels]
    pdf = PdfPages("figs/lick_sn{0}.pdf".format(targetSN))
    fig = plt.figure(1, figsize=(6.25, 5))
    plt.subplots_adjust(bottom=0.12, right=0.97, left=0.09, top=0.96)
    plt.minorticks_on()
    ax = plt.subplot(111)
    ax.minorticks_on()
    plot_indices = np.arange(12, 22)
    for i, vector in enumerate(data):
        if i not in plot_indices:
            continue
        print("Making plot for {0}...".format(labels[i]))
        kmap = np.zeros_like(binimg)
        kmap[:] = np.nan
        for bin, v in zip(bins, vector):
            idx = np.where(binimg == bin)
            kmap[idx] = v
        vmin = lims[i][0] if lims[i][0] else np.median(vector) - 2 * vector.std()
        vmax = lims[i][1] if lims[i][1] else np.median(vector) + 2 * vector.std()
        m = plt.imshow(kmap, cmap="inferno", origin="lower", vmin=vmin,
                       vmax=vmax, extent=extent, aspect="equal")
        make_contours()
        plt.minorticks_on()
        plt.xlabel("X [kpc]")
        plt.ylabel("Y [kpc]")
        plt.xlim(extent[0], extent[1])
        plt.ylim(extent[2], extent[3])
        cbar = plt.colorbar(m)
        cbar.set_label("{0} {1}".format(labels[i], units[i]))
        pdf.savefig()
        plt.clf()
    pdf.close()
    return
def plotErrorResSize():
    matplotlib.rcParams.update({'font.size': 25})
    npzFile = '2016-04-28-09-57_bigRunOnlySnap.npz'
    npz2 = '2016-04-28-15-18_bigRunOnlySnap.npz'
    # raw string so the backslashes are not treated as escape sequences
    projectPath = r'C:\Users\Steve\Documents\Uni\BAThesis\src\errorResSize.pdf'
    pp = PdfPages(projectPath)
    a = np.load(getProjectPath() + npzFile)
    errors = a['errors']
    errors = np.mean(errors, 2).squeeze()
    b = np.load(getProjectPath() + npz2)
    errors2 = b['errors']
    errors2 = np.mean(errors2, 2).squeeze()
    plt.figure(figsize=(10, 7.5))
    plt.plot(errors, 'o', linestyle='-', linewidth=3, label='ridge para = 0.01')
    # plt.plot(errors2, 'o', linestyle='-', linewidth=3, label='ridge para = 0.1')
    plt.grid()
    plt.minorticks_on()
    plt.grid(which='minor', axis='y')
    plt.xlabel('Reservoir size')
    ticks = np.arange(0, 8)
    labels = [25, 50, 100, 200, 400, 800, 1600, 3200]
    plt.xticks(ticks, labels)
    plt.ylabel('Validation error')
    plt.ylim(0, 1)
    plt.tight_layout()
    pp.savefig()
    pp.close()
def SetAxes(legend=False):
    f_b = 0.164
    f_star = 0.01
    err_b = 0.006
    err_star = 0.004
    f_gas = f_b - f_star
    err_gas = np.sqrt(err_b**2 + err_star**2)

    plt.axhline(y=f_gas, ls='--', c='k', label='', zorder=-1)
    x = np.linspace(.0, 2., 1000)
    plt.fill_between(x, y1=f_gas - err_gas, y2=f_gas + err_gas, color='k',
                     alpha=0.3, zorder=-1)
    plt.text(.6, f_gas + 0.006, r'f$_{gas}$', verticalalignment='bottom',
             size='large')

    plt.xlabel(r'r/r$_{vir}$', size='x-large')
    plt.ylabel(r'f$_{gas}$ ($<$ r)', size='x-large')
    plt.xscale('log')
    plt.xticks([1. / 1.9, 1.33 / 1.9, 1, 1.5, 2.],
               [r'r$_{500}$', r'r$_{200}$', 1, 1.5, 2], size='large')
    # plt.yticks([.1, .2], ['0.10', '0.20'])
    plt.tick_params(length=10, which='major')
    plt.tick_params(length=5, which='minor')
    plt.xlim([0.4, 1.5])
    plt.minorticks_on()

    if legend:
        plt.legend(loc=0, prop={'size': 'small'}, markerscale=0.7,
                   numpoints=1, ncol=2)
def plotcurve(xax, f1, f2, ct):
    fig, axes = plt.subplots(nrows=4, ncols=1, sharex=True)
    plt.minorticks_on()
    fig.subplots_adjust(hspace=0.001)
    plt.rc('font', family='serif', serif='Times')
    y_formatter = matplotlib.ticker.ScalarFormatter(useOffset=False)

    axes[0].plot(xax, f1[0], 'D-', c='k', mec='b', fillstyle='none')
    axes[0].plot(xax, f2[0], 'o-', c='g', mec='k', fillstyle='none')
    axes[0].set_ylabel(r'$raw$ $RMS$', fontsize=13)
    axes[0].yaxis.set_major_formatter(y_formatter)
    axes[0].yaxis.set_major_locator(MaxNLocator(prune='both', nbins=5))

    axes[1].plot(xax, f1[1], 'D-', c='k', mec='b', fillstyle='none')
    axes[1].plot(xax, f2[1], 'o-', c='g', mec='k', fillstyle='none')
    axes[1].set_ylabel(r'$frames$ $RMS$', fontsize=13)
    axes[1].yaxis.set_major_formatter(y_formatter)
    axes[1].yaxis.set_major_locator(MaxNLocator(prune='both', nbins=5))

    axes[2].plot(xax, f1[2], 'D-', c='k', mec='b', fillstyle='none')
    axes[2].plot(xax, f2[2], 'o-', c='g', mec='k', fillstyle='none')
    axes[2].set_ylabel(r'$\sigma-clipped$', fontsize=13)
    axes[2].yaxis.set_major_formatter(y_formatter)
    axes[2].yaxis.set_major_locator(MaxNLocator(prune='both', nbins=5))

    axes[3].plot(xax, f1[3], 'D-', c='k', mec='b', fillstyle='none', label='Hard')
    axes[3].plot(xax, f2[3], 'o-', c='g', mec='k', fillstyle='none', label='Soft')
    axes[3].set_ylabel(r'$\sigma$ $clipped$ $RMS$', fontsize=13)
    axes[3].set_xlabel(r'$aperture$ $(pixels)$', fontsize=13)
    axes[3].yaxis.set_major_formatter(y_formatter)
    axes[3].yaxis.set_major_locator(MaxNLocator(prune='both', nbins=5))
    axes[3].legend(numpoints=1)

    plt.savefig('paneltest/' + str(ct) + 'updchanges.png',
                bbox_inches='tight', dpi=200)
def plotshare(self):
    datesaxis = mdates.date2num(self.dates)
    fig0 = plt.figure()
    ax0 = fig0.add_subplot(1, 1, 1)
    dateFmt = mdates.DateFormatter('%Y-%m-%d')
    ax0.xaxis.set_major_formatter(dateFmt)
    plt.minorticks_on()
    N = len(datesaxis)
    # ax0.xaxis.set_major_locator(DaysLoc)
    index = np.arange(N)
    # dev = np.abs(self.share_prices[:, 0] - self.share_prices[:, 2])
    # p0 = plt.errorbar(index, self.share_prices, dev, fmt='.-', ecolor='green',
    #                   elinewidth=0.1, linewidth=1)
    p0, = plt.plot(index, self.share_prices)  # unpack the Line2D for the legend
    ax0.legend([p0], [symbol])
    ax0.set_ylabel(u'Index')
    ax0.xaxis.set_major_formatter(
        ticker.FuncFormatter(lambda x, pos=None: self.dates[int(x)]))
    ax0.set_xticks(np.arange(0, index[-1], 4))
    ax0.set_xlim(index[0], index[-1])
    fig0.autofmt_xdate(rotation=90)
    fig0.savefig('./figures/sharesPrices.eps')
    plt.show()
def avg_row_col_main(self):
    '''
    The main controller.
    '''
    self.get_image_list()
    self.parse_image_info()
    self.read_data()
    self.calc_avg()

    # Set plotting parameters
    plt.rcParams['legend.fontsize'] = 10
    plt.rcParams['font.family'] = 'Helvetica'
    plt.minorticks_on()

    # Plot the data
    if self.plot_type == 'row' or self.plot_type == 'both':
        self.descrip = 'Row'
        self.anti_descrip = 'Column'
        if self.all_switch == 'off':
            self.plot_single_data(self.avg_row_list)
        elif self.all_switch == 'on':
            self.plot_all_data(self.avg_row_list)
    # a second "if" (not "elif") so plot_type == 'both' also plots columns
    if self.plot_type == 'col' or self.plot_type == 'both':
        self.descrip = 'Column'
        self.anti_descrip = 'Row'
        if self.all_switch == 'off':
            self.plot_single_data(self.avg_col_list)
        elif self.all_switch == 'on':
            self.plot_all_data(self.avg_col_list)
def plotter(x, y, image, dep_var, ind_var):
    """
    :param x: your dependent variable
    :param y: your independent variable
    :param image: image name used in the plot title
    :param dep_var: x-axis label
    :param ind_var: y-axis label
    :return:
    """
    # todo - make little gridlines
    # turn your x and y into numpy arrays
    x = np.array(x)
    y = np.array(y)

    ETrF_vs_NDVI = plt.figure()
    aa = ETrF_vs_NDVI.add_subplot(111)
    aa.set_title('Bare soils/Tailings Pond - {}'.format(image), fontweight='bold')
    aa.set_xlabel('{}'.format(dep_var), style='italic')
    aa.set_ylabel('{}'.format(ind_var), style='italic')
    aa.scatter(x, y, facecolors='none', edgecolors='blue')
    plt.minorticks_on()
    # aa.grid(b=True, which='major', color='k')
    aa.grid(b=True, which='minor', color='white')
    plt.tight_layout()

    # TODO - UNCOMMENT AND CHANGE THE PATH TO SAVE THE FIGURE AS A PDF TO A GIVEN LOCATION.
    # plt.savefig(
    #     "/Volumes/SeagateExpansionDrive/jan_metric_PHX_GR/green_river_stack/stack_output/20150728_ETrF_NDVI_gr.pdf")

    plt.show()
def plot_field_corr2(cat, theta, out, err, out2, err2, label):
    plt.figure()
    plt.errorbar(theta, theta * out[0], yerr=theta * err[0], marker='o',
                 linestyle='', color='r', label=r'$e_1$')
    plt.errorbar(theta, theta * out2[0], yerr=theta * err2[0], marker='o',
                 linestyle='', color='b', label=r'$e_2$')
    if 'chip' not in label:
        plt.axvline(x=5.25 * 60, linewidth=1, color='k')
    elif 'corner' in label:
        plt.axvline(x=0.75 * 60, linewidth=1, color='k')
        plt.axvline(x=0.15 * 60, linewidth=1, color='k')
        plt.axvline(x=0.765 * 60, linewidth=1, color='k')
    elif 'centre' in label:
        plt.axvline(x=0.75 * 60 / 2., linewidth=1, color='k')
        plt.axvline(x=0.15 * 60 / 2., linewidth=1, color='k')
        plt.axvline(x=0.765 * 60 / 2., linewidth=1, color='k')
    plt.ylabel(r'$\langle e \rangle$')
    plt.xlabel(r'$\theta$ (arcmin)')
    plt.ylim((-.005, .005))
    plt.xscale('log')
    plt.minorticks_on()
    plt.legend(loc='upper right', ncol=1, frameon=True, prop={'size': 12})
    plt.savefig('plots/xi/field_' + label + '_' + cat.name + '_mean_e.png',
                bbox_inches='tight')
    plt.close()
    return
def plot_hist(x1, bins=config.cfg.get('hbins', 500), name='', label='',
              tile='', w=None):
    print('hist ', label, tile)
    if tile != '':
        bins //= 10  # integer division: plt.hist needs an int bin count
    plt.figure()
    if (w is None) or (tile != ''):
        plt.hist(x1, bins=bins, histtype='stepfilled')
    else:
        # 'normed' was removed in matplotlib 3.1; 'density' is the replacement
        plt.hist(x1, bins=bins, alpha=0.25, density=True, label='unweighted',
                 histtype='stepfilled')
        plt.hist(x1, bins=bins, alpha=0.25, density=True, weights=w,
                 label='weighted', histtype='stepfilled')
    plt.ylabel(r'$n$')
    s = config.lbl.get(label, label.replace('_', '-'))
    if config.log_val.get(label, False):
        s = 'log ' + s
    plt.xlabel(s + ' ' + tile)
    plt.minorticks_on()
    if tile != '':
        name = 'tile_' + tile + '_' + name
    plt.legend(loc='upper right', ncol=2, frameon=True, prop={'size': 12})
    plt.savefig('plots/hist/hist_' + name + '_' + label.replace('_', '-') + '.png',
                bbox_inches='tight')
    plt.close()
    return
def pieces_plot(dgs, title, outfname):
    """ Plot deltaG over lambda as well as the electrostatic and vdW components.

    Parameters
    ----------
    dgs: array of energies, in order of total dG, elec, vdW
    title: string name of the main title
    outfname: string name of the image to be saved

    """
    lambdas = np.linspace(0., 1., len(dgs[0]))  # for x-axis, lambda from 0 to 1
    labels = [r'$\Delta$G', 'electrostatic', 'van der Waals']
    plt.figure()
    for y, l in zip(dgs, labels):
        plt.plot(lambdas, y, label=l)
    # plt.errorbar(lambdas, dgs, sds)
    plt.title(title, fontsize=18)
    plt.xlabel(r"$\lambda$", fontsize=18)
    plt.ylabel("energy (kcal/mol)", fontsize=18)
    plt.legend(fancybox=True, loc=2)
    plt.minorticks_on()
    plt.tick_params(axis='both', width=1.5, length=7, labelsize=16)
    plt.tick_params(which='minor', width=1.0, length=4)
    plt.savefig(outfname + '_int-decom.eps', format='eps')
    plt.clf()
def prettyplot():
    ticks_font = font_manager.FontProperties(family='Helvetica',
                                             style='normal', size=16,
                                             weight='normal', stretch='normal')
    font = {'family': 'Helvetica', 'size': 10}
    matplotlib.rc('font', **font)
    # matplotlib.rc('ylabel', fontweight='bold', fontsize=18, labelpad=20)
    matplotlib.rcParams['axes.labelsize'] = 18
    matplotlib.rcParams['axes.labelweight'] = 'bold'
    matplotlib.rcParams['axes.titlesize'] = 20
    # matplotlib.rcParams['axes.titleweight'] = 'bold'

    plt.figure()
    ax = plt.axes()
    for label in ax.get_xticklabels():
        # print(label.get_text())
        label.set_fontproperties(ticks_font)
    for label in ax.get_yticklabels():
        label.set_fontproperties(ticks_font)
    plt.minorticks_on()
    plt.tick_params(axis='both', which='major', labelsize=12)
    plt.gcf().subplots_adjust(bottom=0.15)
    plt.gcf().subplots_adjust(left=0.15)
    t = plt.title('')
    t.set_y(1.05)
    t.set_fontweight('bold')
    x = ax.set_xlabel('', labelpad=20)
    y = ax.set_ylabel('', labelpad=20)
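# A hedged usage sketch for prettyplot: call it first so its rcParams and tick
# styling apply to the figure it creates, then draw into the current axes. The
# data here are made up.
def demo_prettyplot():
    prettyplot()
    plt.plot([0, 1, 2], [0, 1, 4], 'o-')
    plt.gca().set_xlabel('x', labelpad=20)
    plt.gca().set_ylabel('y', labelpad=20)
    plt.show()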
def plotSeries(key, ymin=None, ymax=None):
    """
    Plot the chosen dataset key for each scanned data file.

    @param key: data set key to use
    @type key: L{str}
    @param ymin: minimum value for y-axis or L{None} for default
    @type ymin: L{int} or L{float}
    @param ymax: maximum value for y-axis or L{None} for default
    @type ymax: L{int} or L{float}
    """
    titles = []
    for title, data in sorted(dataset.items(), key=lambda x: x[0]):
        titles.append(title)
        x, y = zip(*[(k / 3600.0, v[key]) for k, v in
                     sorted(data.items(), key=lambda x: x[0]) if key in v])
        plt.plot(x, y)

    plt.xlabel("Hours")
    plt.ylabel(key)
    plt.xlim(0, 24)
    if ymin is not None:
        plt.ylim(bottom=ymin)  # the 'ymin'/'ymax' kwargs were removed in matplotlib 3.3
    if ymax is not None:
        plt.ylim(top=ymax)
    plt.xticks((1, 4, 7, 10, 13, 16, 19, 22), (18, 21, 0, 3, 6, 9, 12, 15))
    plt.minorticks_on()
    plt.gca().xaxis.set_minor_locator(AutoMinorLocator(n=3))
    plt.grid(True, "major", "x", alpha=0.5, linewidth=0.5)
    plt.grid(True, "minor", "x", alpha=0.5, linewidth=0.5)
    plt.legend(titles, loc="upper left", shadow=True, fancybox=True)
    plt.show()
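# A hedged sketch of the module-level `dataset` that plotSeries reads: a dict
# mapping a title to {seconds: {key: value}} samples (inferred from the
# k / 3600.0 conversion above; the concrete names here are made up).
#
#     dataset = {
#         "node01": {0: {"requests": 12}, 3600: {"requests": 18}},
#         "node02": {0: {"requests": 9}, 3600: {"requests": 15}},
#     }
#     plotSeries("requests", ymin=0)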
def plot_comparison_numpy(params1, label1, color1='black',
                          params2=None, label2="", color2='red',
                          title="", plotname=""):
    """Plot the same function for 2 sets of parameters, with individual labels.

    Uses numpy.
    """
    pt = np.arange(0.5, 20, 0.5)
    corrections1 = pf_func(pt, params1)
    plt.plot(pt, corrections1, 'x-', color=color1, label=label1, lw=1.5)
    if params2:
        corrections2 = pf_func(pt, params2)
        plt.plot(pt, corrections2, 'd-', color=color2, label=label2, lw=1.5)
    plt.xlabel(r"$p_T^{in} \mathrm{[GeV]}$")
    plt.ylabel("Corr. factor")
    # plt.set_xscale('log')
    plt.minorticks_on()
    plt.grid(b=True, which='major', axis='both')
    plt.grid(b=True, which='minor', axis='both')
    plt.xlim(left=pt[0] - 0.5)
    # draw intersection lines for 0.5, 5, 10
    for p, lc in zip([0.5, 5, 10], ["purple", "blue", "green"]):
        corr = pf_func(p, params1)
        plt.vlines(p, ymin=plt.ylim()[0], ymax=corr, color=lc,
                   linestyle='dashed', linewidth=1.5,
                   label=r'$p_T^{in}$' + ' = %g GeV,\ncorr. factor = %.3f' % (p, corr))
        plt.hlines(corr, xmin=0, xmax=p, color=lc, linestyle='dashed',
                   linewidth=1.5)
    plt.title(title)
    plt.legend(fontsize=12, loc=0)
    if plotname != "":
        plt.savefig(plotname)
    plt.cla()
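# A hedged usage sketch: plot_comparison_numpy relies on a module-level
# pf_func(pt, params) returning correction factors for the given pT values.
# With such a function in scope, a call might look like (values invented):
#
#     plot_comparison_numpy([1.02, 0.5], "2016 params",
#                           params2=[1.01, 0.4], label2="2017 params",
#                           title="Correction comparison", plotname="corr.pdf")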
def plot3panels(xax, p1, p2, p3, p4, ct):
    fig, axes = plt.subplots(nrows=4, ncols=1, sharex=True)
    plt.minorticks_on()
    fig.subplots_adjust(hspace=0.001)
    plt.rc('font', family='serif', serif='Times')
    y_formatter = matplotlib.ticker.ScalarFormatter(useOffset=False)

    axes[0].plot(xax, p1, 'D-', c='k', mec='b', fillstyle='none')
    axes[0].set_ylabel(r'$original$ $RMS$', fontsize=13)
    axes[0].yaxis.set_major_formatter(y_formatter)
    axes[0].yaxis.set_major_locator(MaxNLocator(prune='both', nbins=5))

    axes[1].plot(xax, p2, 'D-', c='k', mec='b', fillstyle='none')
    axes[1].set_ylabel(r'$flattened$ $RMS$', fontsize=13)
    axes[1].yaxis.set_major_formatter(y_formatter)
    axes[1].yaxis.set_major_locator(MaxNLocator(prune='both', nbins=5))

    axes[2].plot(xax, p4, 'D-', c='k', mec='b', fillstyle='none')
    axes[2].set_ylabel(r'$\sigma-clipped$', fontsize=13)
    axes[2].yaxis.set_major_formatter(y_formatter)
    axes[2].yaxis.set_major_locator(MaxNLocator(prune='both', nbins=5))

    axes[3].plot(xax, p3, 'D-', c='k', mec='b', fillstyle='none', label='Soft')
    axes[3].set_ylabel(r'$\sigma$ $clipped$ $RMS$', fontsize=13)
    axes[3].set_xlabel(r'$aperture$ $(pixels)$', fontsize=13)
    axes[3].yaxis.set_major_formatter(y_formatter)
    axes[3].yaxis.set_major_locator(MaxNLocator(prune='both', nbins=5))
    axes[3].legend(numpoints=1)

    plt.savefig(str(ct) + 'smoothlightS.png', bbox_inches='tight', dpi=200)
def gbar_plot(dgs, sds, title, outfname):
    """ Plot free energy change deltaG over lambda.

    Parameters
    ----------
    dgs: 1D array of dGs
    sds: 1D array of standard deviations corresponding to dgs array.
         If don't have this, just feed function a list of zeroes.
    title: string name of the main title
    outfname: string name of the image to be saved

    """
    ### FOR SOME REASON THE ERROR BARS ARE DISPLAYING HORIZ
    ### EVEN WHEN DEFINING yerr=sds ...
    lambdas = np.linspace(0., 1., len(dgs))  # for x-axis, lambda from 0 to 1
    plt.figure()
    plt.errorbar(lambdas, dgs)
    # plt.errorbar(lambdas, dgs, sds)
    plt.title(title, fontsize=18)
    plt.xlabel(r"$\lambda$", fontsize=18)
    plt.ylabel(r"$\Delta$G (kcal/mol)", fontsize=18)
    plt.minorticks_on()
    plt.tick_params(axis='both', width=1.5, length=7, labelsize=16)
    plt.tick_params(which='minor', width=1.0, length=4)
    plt.savefig(outfname + '_summary.eps', format='eps')
    plt.clf()
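# On the all-caps comment in gbar_plot: the third positional argument of
# plt.errorbar is yerr, so passing sds there should give vertical bars. A
# minimal sketch with made-up data (same np/plt imports assumed):
def demo_vertical_errorbars():
    lambdas = np.linspace(0., 1., 11)
    dgs = lambdas ** 2           # made-up energies
    sds = np.full(11, 0.05)      # made-up standard deviations
    plt.errorbar(lambdas, dgs, yerr=sds, capsize=3)
    plt.show()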
def plot_seeing(fwhm, tag=None):
    fig = plt.figure()
    plt.minorticks_on()
    ax = fig.add_subplot(111)

    print('median seeing in g = ', numpy.median(fwhm['g']))
    print('median seeing in r = ', numpy.median(fwhm['r']))
    print('median seeing in i = ', numpy.median(fwhm['i']))
    print('median seeing in z = ', numpy.median(fwhm['z']))
    riz = numpy.concatenate([fwhm['r'], fwhm['i'], fwhm['z']])
    print('median seeing in riz = ', numpy.median(riz))

    nbins = 40
    range = (0.7, 1.7)  # note: shadows the builtin range within this function
    # n, bins, p = ax.hist(riz, bins=nbins, range=range, histtype='step', fill=True,
    #                      color='black', facecolor='cyan', label='riz')
    # n, bins, p = ax.hist(fwhm['g'], bins=bins, histtype='step', color='green', label='g')
    # n, bins, p = ax.hist(fwhm['r'], bins=bins, histtype='step', color='red', label='r')
    # n, bins, p = ax.hist(fwhm['i'], bins=bins, histtype='step', color='magenta', label='i')
    # n, bins, p = ax.hist(fwhm['z'], bins=bins, histtype='step', color='blue', label='z')
    width = (range[1] - range[0]) / nbins
    n, bins, p = ax.hist([fwhm['z'], fwhm['i'], fwhm['r']], bins=nbins,
                         range=range, histtype='barstacked', fill=True,
                         color=['black', 'purple', 'red'], width=width)

    ax.set_xlabel('Seeing FWHM (arcsec)')
    ax.set_ylabel('Number of exposures')
    ax.legend(reversed(p), ['r', 'i', 'z'], loc='upper right')
    ax.set_xlim(*range)
    plt.tight_layout()
    if tag is None:
        plt.savefig('seeing.pdf')
    else:
        plt.savefig('seeing_%s.pdf' % tag)
def plot_results(dists):
    for i, d in enumerate(dists):
        ax = plt.subplot(3, 3, (4 * i) + 1)
        # 'normed' was removed in matplotlib 3.1 (use density); the duplicate
        # ec=/edgecolor= aliases in the original call are collapsed to one
        N, bins, patches = plt.hist(d.data, color="b", bins=30,
                                    range=tuple(d.lims), density=True,
                                    edgecolor="k", histtype='bar', linewidth=1.)
        fracs = N.astype(float) / N.max()
        norm = Normalize(-.2 * fracs.max(), 1.5 * fracs.max())
        for thisfrac, thispatch in zip(fracs, patches):
            color = cm.gray_r(norm(thisfrac))
            thispatch.set_facecolor(color)
            thispatch.set_edgecolor("w")
        x = np.linspace(d.data.min(), d.data.max(), 100)
        ylim = ax.get_ylim()
        plt.plot(x, d.best.pdf(x), "-r", lw=1.5, alpha=0.7)
        ax.set_ylim(ylim)
        plt.axvline(d.best.MAPP, c="r", ls="--", lw=1.5)
        plt.tick_params(labelright=True, labelleft=False, labelsize=10)
        plt.xlim(d.lims)
        plt.locator_params(axis='x', nbins=10)
        if i < 2:
            plt.setp(ax.get_xticklabels(), visible=False)
        else:
            plt.xlabel(r"[$\mathregular{\alpha}$ / Fe]")
        plt.minorticks_on()

    def hist2D(dist1, dist2):
        """ Plot distribution and confidence contours. """
        X, Y = np.mgrid[dist1.lims[0]:dist1.lims[1]:20j,
                        dist2.lims[0]:dist2.lims[1]:20j]
        extent = [dist1.lims[0], dist1.lims[1], dist2.lims[0], dist2.lims[1]]
        positions = np.vstack([X.ravel(), Y.ravel()])
        values = np.vstack([dist1.data, dist2.data])
        kernel = stats.gaussian_kde(values)
        Z = np.reshape(kernel(positions).T, X.shape)
        ax.imshow(np.rot90(Z), cmap="gray_r", extent=extent, aspect="auto",
                  interpolation="spline16")
        plt.axvline(dist1.best.MAPP, c="r", ls="--", lw=1.5)
        plt.axhline(dist2.best.MAPP, c="r", ls="--", lw=1.5)
        plt.tick_params(labelsize=10)
        ax.minorticks_on()
        plt.locator_params(axis='x', nbins=10)
        return

    ax = plt.subplot(3, 3, 4)
    hist2D(dists[0], dists[1])
    plt.setp(ax.get_xticklabels(), visible=False)
    plt.ylabel("[Z/H]")
    plt.xlim(dists[0].lims)
    plt.ylim(dists[1].lims)

    ax = plt.subplot(3, 3, 7)
    hist2D(dists[0], dists[2])
    plt.ylabel(r"[$\mathregular{\alpha}$ / Fe]")
    plt.xlabel("log Age (yr)")
    plt.xlim(dists[0].lims)
    plt.ylim(dists[2].lims)

    ax = plt.subplot(3, 3, 8)
    plt.xlabel("[Z/H]")
    hist2D(dists[1], dists[2])
    plt.xlim(dists[1].lims)
    plt.ylim(dists[2].lims)
    return
def plot_density(x, primary=True):
    """
    Creates a density plot of the data.

    Code is based on this forum message http://stackoverflow.com/a/4152016

    :param x: (array like) the data
    """
    # Calculate the density points
    density = gaussian_kde(x)
    # TODO: Come up with a better start and end point
    xs = linspace(min(x) - 1, max(x) + 1, 200)
    density.covariance_factor = lambda: 0.25
    density._compute_covariance()
    plt.plot(xs, density(xs), color='#0066FF', alpha=0.7)

    # Add Grid lines
    plt.minorticks_on()
    plt.grid(b=True, which='major', color='#666666', linestyle='-')
    plt.grid(b=True, which='minor', color='#999999', linestyle='-', alpha=0.2)

    # Render the plot
    if primary:
        plt.show()
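# The covariance_factor/_compute_covariance override above is the classic
# Stack Overflow workaround for fixing the KDE bandwidth; SciPy also exposes
# this directly through the bw_method argument, so an equivalent sketch is:
def demo_kde_bandwidth(x):
    density = gaussian_kde(x, bw_method=0.25)  # same fixed bandwidth factor
    xs = linspace(min(x) - 1, max(x) + 1, 200)
    plt.plot(xs, density(xs), color='#0066FF', alpha=0.7)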
def save(self):
    result = self.bands.all()

    # indexes are negative because they
    # represent the number of days in the past
    index = [(i.get("index") + 1) * -1 for i in result]
    close = [i.get("close") for i in result]
    up = [i.get("up") for i in result]
    middle = [i.get("middle") for i in result]
    down = [i.get("down") for i in result]

    plt.plot(index, up, label="upper band")
    plt.plot(index, down, label="lower band")
    plt.plot(index, middle, label="middle band")
    plt.plot(index, close, label="close price")
    plt.xlabel("Past days (0 = today)")
    plt.ylabel("Value (USD$)")
    plt.title("%s bollinger bands" % (self.bands.symbol))

    # enables the grid for every single decimal value
    plt.minorticks_on()
    plt.grid(True, which="both")

    legend = plt.legend(fancybox=True, loc="best")
    legend.get_frame().set_alpha(0.5)

    plt.savefig(self.bands.symbol + ".png")
    # the plot must be closed, otherwise matplotlib
    # will paint over previous plots
    plt.close()
def disp_frame(x_data, y_data, mag_data):
    '''
    Show full frame.
    '''
    coord, x_name, y_name = prep_plots.coord_syst()
    st_sizes_arr = prep_plots.star_size(mag_data)
    plt.gca().set_aspect('equal')
    # Get max and min values in x,y
    x_min, x_max = min(x_data), max(x_data)
    y_min, y_max = min(y_data), max(y_data)
    # Set plot limits
    plt.xlim(x_min, x_max)
    plt.ylim(y_min, y_max)
    # If RA is used, invert axis.
    if coord == 'deg':
        plt.gca().invert_xaxis()
    # Set axis labels
    plt.xlabel('{} ({})'.format(x_name, coord), fontsize=12)
    plt.ylabel('{} ({})'.format(y_name, coord), fontsize=12)
    # Set minor ticks
    plt.minorticks_on()
    # Set grid
    plt.grid(b=True, which='major', color='k', linestyle='-', zorder=1)
    plt.grid(b=True, which='minor', color='k', linestyle='-', zorder=1)
    plt.scatter(x_data, y_data, marker='o', c='black', s=st_sizes_arr)
    plt.draw()
    print('Plot displayed, waiting for it to be closed.')
def __init__(self, field, min_x, max_x, n_x, min_y, max_y, n_y):
    self.field = field
    self.min_x = min_x
    self.max_x = max_x
    self.n_x = n_x
    self.min_y = min_y
    self.max_y = max_y
    self.n_y = n_y

    self.X = np.linspace(min_x, max_x, n_x)
    self.Y = np.linspace(min_y, max_y, n_y)
    points = np.empty([n_y * n_x, 3])
    for i in range(0, n_y):
        for j in range(0, n_x):
            points[n_x * i + j, :] = np.array([self.X[j], self.Y[i], 0.])
    self.B = self.field.evaluate(points)

    self.legend_handles = []

    plt.axis('equal')
    plt.grid(b=True, which='major')
    plt.grid(b=True, which='minor', color="0.75")
    plt.minorticks_on()
    plt.ylim([min_y, max_y])
    plt.xlim([min_x, max_x])
def plot_v_of_t(volume_list, name, iteration):
    """Plots 2-volume as a function of proper time.
    Takes the output of make_v_of_t.
    name = name of simulation
    iteration = number of spacetime in ensemble. Might be sweep# instead."""
    # Defines the plot
    vplot = plt.plot(volume_list, 'bo', volume_list, 'r-')
    # plot title is made of name+iteration
    plot_title = name + ' ' + str(iteration)
    # Labels and Titles
    plt.title(plot_title)
    plt.xlabel('Proper Time')
    plt.ylabel('2-Volume Per Time Slice')
    # Ensure the y range is appropriate
    plt.ylim([np.min(volume_list) - .5, np.max(volume_list) + .5])
    # Turn on minor ticks
    plt.minorticks_on()
    # Show the plot
    plt.show()
    return
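# A hedged usage sketch for plot_v_of_t; the volume list is invented demo data.
def demo_plot_v_of_t():
    volumes = [10, 12, 15, 14, 11, 9]  # made-up 2-volumes per time slice
    plot_v_of_t(volumes, 'test-simulation', 0)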
def start(self, event):
    # creates DataFrame with all the data from TDMS and Raman file
    d = {
        'Filename': [], 'Ref_si': [], 'Elongation': [], 'Time': [],
        'Force': [], 'StrainMacro': [], 'StrainSi': [], 'StressMacro': [],
        'StressSi': [], 'Duration': [], 'pCov': [], 'Err_strain': [],
        'Err_stress': []  # the rows appended below use the key 'Err_stress'
    }
    df = pd.DataFrame(data=d)
    for r_file in self.raman_name[0]:
        print("parsing {:}".format(r_file))
        print('{:}'.format(self.ref_start))
        print('{:}'.format(self.ref_end))
        try:
            r_o = rp.raman_spectrum(r_file,
                                    orientation=self.crystalorientation,
                                    file_type='t_scan',
                                    ref_start=self.ref_start,
                                    ref_end=self.ref_end,
                                    ref_si=520.7 - self.PowerShift)
            for iii, t in enumerate(r_o.time_epoch):
                if self.time_coef != 1:
                    t = r_o.epoch + (t - r_o.epoch) * self.time_coef
                if self.time_offset != 0:
                    t = t + self.time_offset
                eps_macro = 100 * self.tdms_file.get_Elongation(
                    t, r_o.duration) / (self.tdms_file.Length * 1000)
                # note: DataFrame.append was removed in pandas 2.0;
                # pd.concat is the modern replacement
                df = df.append(
                    {
                        'Filename': r_o.filename,
                        'Ref_si': r_o.ref_si,
                        'Elongation': self.tdms_file.get_Elongation(t, r_o.duration),
                        'Time': t,
                        'Force': self.tdms_file.get_value(t, r_o.duration, 'Force'),
                        'StrainMacro': eps_macro,
                        'StrainSi': r_o.strain[iii],
                        'StressMacro': [],
                        'StressSi': r_o.stress[iii],
                        'Duration': r_o.duration,
                        'pCov': r_o.pcov_peak[iii],
                        'Err_strain': r_o.err_strain[iii],
                        'Err_stress': r_o.err_stress[iii]
                    },
                    ignore_index=True)
        except Exception:
            r_o = rp.raman_spectrum(r_file,
                                    orientation=self.crystalorientation,
                                    ref_start=self.ref_start,
                                    ref_end=self.ref_end,
                                    ref_si=520.7 - self.PowerShift)
            if self.time_offset != 0:
                eps_macro = 100 * self.tdms_file.get_Elongation(
                    r_o.epoch + self.time_offset,
                    r_o.duration) / (self.tdms_file.Length * 1000)
            else:
                eps_macro = 100 * self.tdms_file.get_Elongation(
                    r_o.epoch, r_o.duration) / (self.tdms_file.Length * 1000)
            df = df.append(
                {
                    'Filename': r_o.filename,
                    'Ref_si': r_o.ref_si,
                    'Elongation': self.tdms_file.get_Elongation(r_o.epoch, r_o.duration),
                    'Time': r_o.epoch,
                    'Force': self.tdms_file.get_value(r_o.epoch, r_o.duration, 'Force'),
                    'StrainMacro': eps_macro,
                    'StrainSi': r_o.strain,
                    'StressMacro': [],
                    'StressSi': r_o.stress,
                    'Duration': r_o.duration,
                    'pCov': r_o.pcov_peak,
                    'Err_strain': r_o.err_strain,
                    'Err_stress': r_o.err_stress
                },
                ignore_index=True)
        print(eps_macro)

    plt.figure()
    plt.errorbar(df['StrainMacro'], df['StrainSi'], df['Err_strain'] * 3 + 0.05,
                 marker='o', markerfacecolor='None', color='k')
    # df.plot(x='StrainMacro', y='StrainSi', marker='v', markerfacecolor='None', color='k')
    plt.xlabel('Macroscopic strain %')
    plt.ylabel('Local Silicon Strain %')
    plt.gca().set_xlim(left=0)
    plt.minorticks_on()
    plt.title(self.tdms_file.filename)
    plt.show()
    # df.plot(x='StrainSi', y='Force', kind='scatter')
    # plt.show()
    # df.plot(x='StrainMacro', y='Force')
    # plt.show()
    # df.plot(x='Time', y='Elongation')
    # plt.show()
    # df.plot(x='Time', y='Force')
    # plt.show()
    # df.plot(x='StrainMacro', y='pCov', kind='scatter')
    # plt.show()
    # plt.figure()
    # plt.errorbar(df['StrainMacro'], df['StrainSi'], df['Err_strain'])
    # plt.show()
    df.to_csv('results.txt', sep='\t')
plt.plot(Q2[r1:r2], p_line, color='k',
         label=r'experiment, %s $\pm$ %s' % (np.round(pRgy, 1), np.round(errp, 1)),
         linewidth=3, alpha=0.6)
plt.plot(Q2[r1:r2], r_line, color='r',
         label=r'fitted simulation, %s $\pm$ %s' % (np.round(rRgy, 2), np.round(errr, 2)),
         linewidth=3, alpha=0.6)
plt.plot(Q2[r1:r2], s_line, color='b',
         label=r'all regions simulation, %s $\pm$ %s' % (np.round(sRgy, 2), np.round(errs, 2)),
         linewidth=3, alpha=0.6)
plt.legend(loc='lower left', frameon=False, fontsize=9)
plt.xlabel(r'$Q^2$ ($\AA^{-2}$)', fontsize=15)
plt.xticks([0.002, 0.004, 0.006], fontsize=12)
plt.yticks(fontsize=12)
plt.ylabel(r'ln[I($Q$)/I($Q_{0}$)]', fontsize=15)
plt.tight_layout()
plt.minorticks_on()
plt.savefig('figures/guinier_rgy.pdf')
plt.show()
def draw_ga_evolution(self, make_pdf=True, show_plot=False):
    self.get_ga_input_from_file()
    min_list = []
    max_list = []
    avg_list = []
    for i in range(self.start_from_gen, self.num_gen + 1):
        # print('reading gen ', i)
        self.generation = i
        try:
            fpop = self.get_pop_from_pop_file()
            min_, max_, avg_ = self.get_min_max_avg(fpop)
            if self.conversion_function:
                min_ = self.conversion_function(min_)
                max_ = self.conversion_function(max_)
                avg_ = self.conversion_function(avg_)
            min_list.append(min_)
            max_list.append(max_)
            avg_list.append(avg_)
        except Exception:
            if self.generation == 0:
                raise ValueError('population files not found')
            self.num_gen = self.generation - 1
            print('generation ', self.generation, ' pop file not found')
            print('results plotted until generation ', self.generation - 1)
            break

    fig = plt.gcf()
    plt.clf()
    fig.set_size_inches(12, 11)
    if not self.xticks:
        self.find_tick_size()
    if self.fit_type == 'max':
        plt.plot(min_list, color='black', lw=1, label='Minimum')
        plt.plot(max_list, color='black', lw=2, label='Maximum')
        loc = 4
    else:
        plt.plot(min_list, color='black', lw=2, label='Minimum')
        plt.plot(max_list, color='black', lw=1, label='Maximum')
        loc = 1
    plt.plot(avg_list, color='red', lw=1, label='Average')
    plt.minorticks_on()
    plt.xlim((-self.xticks / 2.0, self.num_gen - self.start_from_gen))
    if self.y_bounds['y_min']:
        y_min = self.y_bounds['y_min']
    else:
        y_min = min(min_list)
    if self.y_bounds['y_max']:
        y_max = self.y_bounds['y_max']
    else:
        y_max = max(max_list)
    if self.min_fit:
        plt.axhline(self.min_fit, color='red', ls=':', lw=0.5)
        string = self.fit_type + ' fit'
        plt.text(-self.xticks / 20, self.min_fit, string,
                 horizontalalignment='right', color='red')
        # self.min_fit, self.num_gen - self.start_from_gen
        if self.min_fit < min(min_list):
            y_min = self.min_fit
    delta = y_max - y_min
    plt.ylim((y_min - (delta * 0.1), y_max + delta * 0.1))
    plt.title('Function ' + self.fit_name + ' evolution',
              fontsize=self.title_size)
    plt.xlabel('Generation', fontsize=self.lable_size)
    plt.ylabel('Function ' + self.fit_name, fontsize=self.lable_size)
    labels = range(self.start_from_gen, self.num_gen, self.xticks)
    x = range(0, len(labels) * self.xticks, self.xticks)
    plt.xticks(x, labels)
    plt.grid(True)
    plt.legend(loc=loc)
    if make_pdf:
        plt.savefig(self.output_path + self.fit_name + '_evolution.pdf')
    if show_plot:
        plt.show()
    print('Evolution visualisation complete')
def stackedbarplot(self, responses, labels, figurename, legend_columns=3,
                   samplesize=0, title=''):
    """
    Create stacked bar plots showing the proportional responses to the
    given question.

    Parameters
    ----------
    responses : numpy array
        Array of the responses for the given question.
    labels : list of strings
        The possible responses for the given question.
    figurename : str
        Short form name of the question asked.
    legend_columns : int, optional
        The number of columns in the legend. Vary to control legend layout.
        The default is 3.
    samplesize : int, optional
        The number of participants which have responded to given question.
        The default is 0.
    title : str, optional
        Title drawn on the png version of the figure. The default is ''.

    Returns
    -------
    Saves the figure to pdf and png files.
    """
    plt.close()
    print(f'Plotting the chart for {figurename} question..')
    sns.set_style(
        'ticks', {
            'axes.spines.right': False,
            'axes.spines.top': False,
            'axes.spines.left': False,
            'ytick.left': False
        })
    if responses.shape[0] == 2:
        ind = [0, 1]
    elif responses.shape[0] == 3:
        ind = [0, 0.85, 2]
    elif responses.shape[0] == 1:
        ind = [0]
    fig, ax = plt.subplots(figsize=(8.7, 6))
    start, pos = 0, [0, 0, 0]
    for i in range(responses.shape[1]):
        option = responses[:, i]
        plt.barh(ind, option, left=start, label=labels[i])
        for k in range(len(ind)):
            xpos = pos[k] + option[k] / 2
            percent = int(round(option[k] * 100))
            if percent >= 10:
                plt.annotate(f'{str(percent)} %', xy=(xpos, ind[k]),
                             ha='center', fontsize=15, color='1')
            elif percent < 3:
                pass
            else:
                plt.annotate(f'{str(percent)}', xy=(xpos, ind[k]),
                             ha='center', fontsize=15, color='1')
        start = start + option
        pos = start
    plt.xlim(0, 1)
    if responses.shape[0] == 2:
        plt.yticks(ind, ('Post', 'Pre'), fontsize=18)
    elif responses.shape[0] == 3:
        plt.yticks(ind, ('Male', 'Female', 'All'), fontsize=18)
    elif responses.shape[0] == 1:
        plt.yticks(ind, '', fontsize=18)
    plt.xticks(fontsize=18)
    ax.xaxis.set_major_formatter(ticker.PercentFormatter(xmax=1))
    plt.legend(bbox_to_anchor=(0, 0.99, 1, .05), loc=3, ncol=legend_columns,
               borderaxespad=0, fontsize=15)
    plt.minorticks_on()
    plt.figtext(0.9, 0.12, f'Based on sample of {samplesize} participants',
                fontsize=10, ha='right')
    pdffile = 'bar_' + figurename + '.pdf'
    pngfile = 'bar_' + figurename + '.png'
    plt.savefig(self.save_filepath / pdffile, bbox_inches='tight')
    plt.title(title, fontsize=20, pad=85 if legend_columns < 3 else 50)
    plt.savefig(self.save_filepath / pngfile, bbox_inches='tight', dpi=600)
    sns.set()
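# A hedged usage sketch for stackedbarplot: each row of `responses` holds the
# option proportions (summing to 1) for one group, and the row count selects
# the y-tick set (2 rows = Post/Pre, 3 = Male/Female/All). Values invented.
#
#     responses = np.array([[0.2, 0.5, 0.3],    # Post
#                           [0.4, 0.4, 0.2]])   # Pre
#     labels = ['Disagree', 'Neutral', 'Agree']
#     self.stackedbarplot(responses, labels, 'confidence', samplesize=42,
#                         title='Confidence before and after')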
def plot_xy_list(x, y, fmts=[], labels=[], xlabel='', ylabel='', title='',
                 xlim=[], ylim=[], minorticks=1, grid_major=1, grid_minor=0,
                 xticks=[], yticks=[], figsize=(4.5, 3.0), alpha=[],
                 left=0.15, bottom=0.15, right=0.97, top=0.97,
                 legend_loc='best', xscale='linear', yscale='linear',
                 savepath='', savedpi=300, showplot=1):
    if not (len(x) == len(y)):
        raise ValueError('number of arrays in X and Y are different. Exiting...')
    n = len(x)

    #### plot
    legends = 1
    if (labels == []):
        labels = [''] * n
        legends = 0
    if (fmts == []):
        aux = []
        for i in range(n):
            if (i >= 10):
                i = i % 10
            aux.append('-C{0:d}'.format(i))
        fmts = aux
    if (alpha == []):
        alpha = [0.9] * n

    fig, ax = plt.subplots(figsize=figsize)
    plt.subplots_adjust(left, bottom, right, top)
    for i in range(n):
        plt.plot(x[i], y[i], fmts[i], label=labels[i], alpha=alpha[i])
    plt.minorticks_on()
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.title(title)
    plt.tick_params(which='both', axis='both', direction='in', top=True,
                    right=True)
    if (legends):
        plt.legend(loc=legend_loc)
    if (xlim != []):
        plt.xlim(xlim)
    if (ylim != []):
        plt.ylim(ylim)
    if (xticks != []):
        plt.xticks(xticks)
    if (yticks != []):
        plt.yticks(yticks)
    if (grid_major):
        plt.grid(which='major', alpha=0.5)
    if (grid_minor):
        plt.grid(which='minor', alpha=0.2)
    if (xscale in ['log', 'logarithm']):
        plt.xscale('log')
    if (yscale in ['log', 'logarithm']):
        plt.yscale('log')
    if (savepath != ''):
        plt.savefig(savepath, dpi=savedpi)
    if (showplot):
        plt.show()
    return fig, ax
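# A hedged usage sketch for plot_xy_list with two curves; assumes numpy is
# imported as np alongside the module's matplotlib import.
def demo_plot_xy_list():
    t = np.linspace(0, 2 * np.pi, 100)
    plot_xy_list([t, t], [np.sin(t), np.cos(t)],
                 labels=['sin', 'cos'], xlabel='t', ylabel='f(t)',
                 title='demo')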
def pl_2_param_dens(_2_params, gs, min_max_p2, varIdxs, params_trace):
    '''
    Parameter vs parameters density map.
    '''
    plot_dict = {
        'metal-age': [0, 2, 2, 4, 0, 1], 'metal-ext': [0, 2, 4, 6, 0, 2],
        'metal-dist': [0, 2, 6, 8, 0, 3], 'metal-mass': [0, 2, 8, 10, 0, 4],
        'metal-binar': [0, 2, 10, 12, 0, 5], 'age-ext': [2, 4, 4, 6, 1, 2],
        'age-dist': [2, 4, 6, 8, 1, 3], 'age-mass': [2, 4, 8, 10, 1, 4],
        'age-binar': [2, 4, 10, 12, 1, 5], 'ext-dist': [4, 6, 6, 8, 2, 3],
        'ext-mass': [4, 6, 8, 10, 2, 4], 'ext-binar': [4, 6, 10, 12, 2, 5],
        'dist-mass': [6, 8, 8, 10, 3, 4], 'dist-binar': [6, 8, 10, 12, 3, 5],
        'mass-binar': [8, 10, 10, 12, 4, 5]
    }

    labels = [
        '$z$', '$log(age)$', '$E_{(B-V)}$', '$(m-M)_o$',
        r'$M\,(M_{{\odot}})$', '$b_{frac}$'
    ]

    gs_x1, gs_x2, gs_y1, gs_y2, mx, my = plot_dict[_2_params]
    x_label, y_label = labels[mx], labels[my]

    ax = plt.subplot(gs[gs_y1:gs_y2, gs_x1:gs_x2])
    # To specify the number of ticks on both or any single axes
    ax.locator_params(nbins=5)
    if gs_x1 == 0:
        plt.ylabel(y_label, fontsize=11)
        plt.yticks(rotation=45)
    else:
        ax.tick_params(labelleft=False)
    if gs_y2 == 12:
        plt.xlabel(x_label, fontsize=11)
        plt.xticks(rotation=45)
    else:
        ax.tick_params(labelbottom=False)
    plt.minorticks_on()

    if mx in varIdxs and my in varIdxs:
        mx_model, my_model = varIdxs.index(mx), varIdxs.index(my)

        ax.set_title(r"$\rho={:.2f}$".format(np.corrcoef(
            [params_trace[mx_model], params_trace[my_model]])[0][1]),
            fontsize=11)

        hist2d(ax, params_trace[mx_model], params_trace[my_model])

        mean_pos, width, height, theta = SigmaEllipse(np.array(
            [params_trace[mx_model], params_trace[my_model]]).T)
        # Plot 95% confidence ellipse.
        plt.scatter(mean_pos[0], mean_pos[1], marker='x', c='b', s=30,
                    linewidth=2, zorder=4)
        ellipse = Ellipse(xy=mean_pos, width=width, height=height, angle=theta,
                          edgecolor='r', fc='None', lw=.7, zorder=4)
        ax.add_patch(ellipse)

    xp_min, xp_max, yp_min, yp_max = min_max_p2
    ax.set_xlim([xp_min, xp_max])
    ax.set_ylim([yp_min, yp_max])

    # Grid won't respect 'zorder':
    # https://github.com/matplotlib/matplotlib/issues/5045
    # So we plot the grid behind everything else manually.
    xlocs, xlabels = plt.xticks()
    ylocs, ylabels = plt.yticks()
    for xt in xlocs:
        plt.axvline(x=xt, linestyle='-', color='w', zorder=-4)
    for yt in ylocs:
        plt.axhline(y=yt, linestyle='-', color='w', zorder=-4)
def createCSV(filterSize, hashes, set1Size, set2SizeDiff, setDiff, measuring):
    # Determine Graph Labels and Title
    yLabel = "Time (ms)" if measuring == 1 else "Error Percent (%)"
    xLabel = ""
    arr = []
    if type(filterSize) == list:
        arr = filterSize
        xLabel = "Filter Size"
    elif type(hashes) == list:
        arr = hashes
        xLabel = "# of Hashes"
    elif type(set1Size) == list:
        arr = set1Size
        xLabel = "Set Size"
    elif type(set2SizeDiff) == list:
        arr = set2SizeDiff
        xLabel = "Set Size Diff"
    elif type(setDiff) == list:
        arr = setDiff
        xLabel = "Size of Difference"

    title = xLabel + str(arr) + " by " + yLabel
    print(title)

    # Start CSV and write first line
    writer = csv.writer(open("results//" + title + ".csv", 'w', newline=''))
    writer.writerow(["", "Regular", "Method1", "Method2"])

    # Define Vectors to store information
    xAxis = []
    regAxis = []
    meth1 = []
    meth2 = []

    # The Main Loop: iterate through the designated variable
    for i in list(range(int(arr[0]), int(arr[1]),
                        ceil(abs(int(arr[0]) - int(arr[1])) / DATA_POINTS))):
        xAxis.append(i)
        # Call BloomFilter Function and save result
        result = callBloomFilter(
            str(i) if type(filterSize) == list else filterSize,
            str(i) if type(hashes) == list else hashes,
            str(i) if type(set1Size) == list else set1Size,
            str(i) if type(set2SizeDiff) == list else set2SizeDiff,
            str(i) if type(setDiff) == list else setDiff)
        info = [i, result[0 + measuring], result[2 + measuring],
                result[4 + measuring]]
        writer.writerow(info)
        regAxis.append(0)
        meth1.append(result[2 + measuring] if measuring else
                     (1 - (float(result[2]) /
                           float(i if type(setDiff) == list else result[measuring]))) * 100)
        meth2.append(result[4 + measuring] if measuring else
                     (1 - (float(result[4]) /
                           float(i if type(setDiff) == list else result[measuring]))) * 100)

    fig, ax = plt.subplots()
    plt.plot(xAxis, regAxis, label="regAxis")   # plotting the points
    plt.plot(xAxis, meth1, label="method 1")    # plotting the points
    plt.plot(xAxis, meth2, label="method 2")    # plotting the points
    desc = ("" if type(filterSize) == list else "M: " + filterSize + "\n") + \
           ("" if type(hashes) == list else
            "H: " + ("optimal" if hashes == '-h' else hashes) + "\n") + \
           ("" if type(set1Size) == list or type(set2SizeDiff) == list else
            "|A|: " + set1Size + "\n" + "|B|: " +
            str(int(set1Size) - int(set2SizeDiff)) + "\n") + \
           ("" if type(setDiff) == list else "|C|: " + setDiff + "\n")
    desc = desc[:-1]
    plt.title(title)
    plt.xlabel(xLabel)  # naming the x axis
    plt.ylabel(yLabel)  # naming the y axis
    plt.legend()
    props = dict(boxstyle='round', facecolor='white', alpha=0.75)
    plt.text(0.30, 0.98, desc, transform=ax.transAxes, fontsize=10,
             verticalalignment='top', bbox=props)
    plt.minorticks_on()
    # Customize the major grid
    plt.grid(which='major', linestyle='dashed', linewidth='0.5', color='black')
    # Customize the minor grid
    plt.grid(which='minor', linestyle=':', linewidth='0.5', color='black',
             alpha=.5)
    # plt.show()
    plt.savefig("results//" + title + ".png")
    plt.gcf().clear()
    fig.clear()
    ax.clear()
    plt.close()
def insider_activity(other_args: List[str], stock: DataFrame, ticker: str,
                     start: str, interval: str):
    """Display insider activity

    Parameters
    ----------
    other_args : List[str]
        argparse other args - ["-n", "10"]
    stock : DataFrame
        Due diligence stock dataframe
    ticker : str
        Due diligence ticker symbol
    start : str
        Start date of the stock data
    interval : str
        Stock data interval
    """
    parser = argparse.ArgumentParser(
        add_help=False,
        prog="ins",
        description="""Prints insider activity over time [Source: Business Insider]""",
    )
    parser.add_argument(
        "-n",
        "--num",
        action="store",
        dest="n_num",
        type=check_positive,
        default=10,
        help="number of latest insider activity.",
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, other_args)
        if not ns_parser:
            return

        url_market_business_insider = (
            f"https://markets.businessinsider.com/stocks/{ticker.lower()}-stock"
        )
        text_soup_market_business_insider = BeautifulSoup(
            requests.get(url_market_business_insider,
                         headers={"User-Agent": get_user_agent()}).text,
            "lxml",
        )

        d_insider = dict()
        l_insider_vals = list()
        for idx, insider_val in enumerate(
                text_soup_market_business_insider.findAll(
                    "td", {"class": "table__td text-center"})):
            # print(insider_val.text.strip())
            l_insider_vals.append(insider_val.text.strip())

            # Add value to dictionary
            if (idx + 1) % 6 == 0:
                # Check if we are still parsing insider trading activity
                if "/" not in l_insider_vals[0]:
                    break
                d_insider[(idx + 1) // 6] = l_insider_vals
                l_insider_vals = list()

        df_insider = pd.DataFrame.from_dict(
            d_insider,
            orient="index",
            columns=["Date", "Shares Traded", "Shares Held", "Price",
                     "Type", "Option"],
        )

        df_insider["Date"] = pd.to_datetime(df_insider["Date"])
        df_insider = df_insider.set_index("Date")
        df_insider = df_insider.sort_index(ascending=True)

        if start:
            df_insider = df_insider[start:]  # type: ignore

        _, ax = plt.subplots()

        if interval == "1440min":
            plt.plot(stock.index, stock["Adj Close"].values, lw=3)
        else:  # Intraday
            plt.plot(stock.index, stock["Close"].values, lw=3)

        plt.title(f"{ticker.upper()} (Time Series) and Price Target")
        plt.xlabel("Time")
        plt.ylabel("Share Price ($)")

        df_insider["Trade"] = df_insider.apply(
            lambda row: (1, -1)[row.Type == "Sell"]
            * float(row["Shares Traded"].replace(",", "")),
            axis=1,
        )
        plt.xlim(df_insider.index[0], stock.index[-1])
        min_price, max_price = ax.get_ylim()

        price_range = max_price - min_price
        shares_range = (
            df_insider[df_insider["Type"] == "Buy"]
            .groupby(by=["Date"]).sum()["Trade"].max()
            - df_insider[df_insider["Type"] == "Sell"]
            .groupby(by=["Date"]).sum()["Trade"].min()
        )
        n_proportion = price_range / shares_range

        for ind in (df_insider[df_insider["Type"] == "Sell"]
                    .groupby(by=["Date"]).sum().index):
            if ind in stock.index:
                ind_dt = ind
            else:
                ind_dt = get_next_stock_market_days(ind, 1)[0]

            n_stock_price = 0
            if interval == "1440min":
                n_stock_price = stock["Adj Close"][ind_dt]
            else:
                n_stock_price = stock["Close"][ind_dt]

            plt.vlines(
                x=ind_dt,
                ymin=n_stock_price
                + n_proportion
                * float(df_insider[df_insider["Type"] == "Sell"]
                        .groupby(by=["Date"]).sum()["Trade"][ind]),
                ymax=n_stock_price,
                colors="red",
                ls="-",
                lw=5,
            )

        for ind in (df_insider[df_insider["Type"] == "Buy"]
                    .groupby(by=["Date"]).sum().index):
            if ind in stock.index:
                ind_dt = ind
            else:
                ind_dt = get_next_stock_market_days(ind, 1)[0]

            n_stock_price = 0
            if interval == "1440min":
                n_stock_price = stock["Adj Close"][ind_dt]
            else:
                n_stock_price = stock["Close"][ind_dt]

            plt.vlines(
                x=ind_dt,
                ymin=n_stock_price,
                ymax=n_stock_price
                + n_proportion
                * float(df_insider[df_insider["Type"] == "Buy"]
                        .groupby(by=["Date"]).sum()["Trade"][ind]),
                colors="green",
                ls="-",
                lw=5,
            )

        plt.grid(b=True, which="major", color="#666666", linestyle="-")
        plt.minorticks_on()
        plt.grid(b=True, which="minor", color="#999999", linestyle="-",
                 alpha=0.2)

        if gtff.USE_ION:
            plt.ion()

        plt.show()

        l_names = list()
        for s_name in text_soup_market_business_insider.findAll(
                "a", {"onclick": "silentTrackPI()"}):
            l_names.append(s_name.text.strip())
        df_insider["Insider"] = l_names

        print(df_insider.sort_index(ascending=False)
              .head(n=ns_parser.n_num).to_string())
        print("")

    except Exception as e:
        print(e)
        print("")
        return
def price_target_from_analysts(other_args: List[str], stock: DataFrame,
                               ticker: str, start: str, interval: str):
    """Display analysts' price targets for a given stock

    Parameters
    ----------
    other_args : List[str]
        argparse other args - ["-n", "10"]
    stock : DataFrame
        Due diligence stock dataframe
    ticker : str
        Due diligence ticker symbol
    start : str
        Start date of the stock data
    interval : str
        Stock data interval
    """
    parser = argparse.ArgumentParser(
        add_help=False,
        prog="pt",
        description="""Prints price target from analysts. [Source: Business Insider]""",
    )
    parser.add_argument(
        "-n",
        "--num",
        action="store",
        dest="n_num",
        type=check_positive,
        default=10,
        help="number of latest price targets from analysts to print.",
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, other_args)
        if not ns_parser:
            return

        url_market_business_insider = (
            f"https://markets.businessinsider.com/stocks/{ticker.lower()}-stock"
        )
        text_soup_market_business_insider = BeautifulSoup(
            requests.get(url_market_business_insider,
                         headers={"User-Agent": get_user_agent()}).text,
            "lxml",
        )

        d_analyst_data = None
        for script in text_soup_market_business_insider.find_all("script"):
            # Get Analyst data
            if "window.analyseChartConfigs.push" in str(script):
                # Extract config data:
                s_analyst_data = (str(script).split("config: ", 1)[1]
                                  .split(",\r\n", 1)[0])
                d_analyst_data = json.loads(s_analyst_data)
                break

        df_analyst_data = pd.DataFrame.from_dict(
            d_analyst_data["Markers"])  # type: ignore
        df_analyst_data = df_analyst_data[[
            "DateLabel", "Company", "InternalRating", "PriceTarget"
        ]]
        df_analyst_data.columns = ["Date", "Company", "Rating", "Price Target"]
        # df_analyst_data
        df_analyst_data["Rating"].replace(
            {"gut": "BUY", "neutral": "HOLD", "schlecht": "SELL"},
            inplace=True)
        df_analyst_data["Date"] = pd.to_datetime(df_analyst_data["Date"])
        df_analyst_data = df_analyst_data.set_index("Date")

        # Slice start of ratings
        if start:
            df_analyst_data = df_analyst_data[start:]  # type: ignore

        if interval == "1440min":
            plt.plot(stock.index, stock["Adj Close"].values, lw=3)
        # Intraday
        else:
            plt.plot(stock.index, stock["Close"].values, lw=3)

        if start:
            plt.plot(df_analyst_data.groupby(
                by=["Date"]).mean()[start:])  # type: ignore
        else:
            plt.plot(df_analyst_data.groupby(by=["Date"]).mean())

        plt.scatter(df_analyst_data.index, df_analyst_data["Price Target"],
                    c="r", s=40)

        plt.legend(["Closing Price", "Average Price Target", "Price Target"])

        plt.title(f"{ticker} (Time Series) and Price Target")
        plt.xlim(stock.index[0], stock.index[-1])
        plt.xlabel("Time")
        plt.ylabel("Share Price ($)")
        plt.grid(b=True, which="major", color="#666666", linestyle="-")
        plt.minorticks_on()
        plt.grid(b=True, which="minor", color="#999999", linestyle="-",
                 alpha=0.2)

        if gtff.USE_ION:
            plt.ion()

        plt.show()
        print("")

        pd.set_option("display.max_colwidth", None)
        print(df_analyst_data.sort_index(ascending=False)
              .head(ns_parser.n_num).to_string())
        print("")

    except Exception as e:
        print(e)
        print("")
        return
def plotHistogram(self):
    # Create the figure to hold the image canvas.
    self.fig = plt.figure()

    # Create the canvas to hold the plot image.
    self.canvas = fc(self.fig)

    # Add the canvas to the box container.
    # First remove any other children (previous plots).
    for child in self.histBox.get_children():
        self.histBox.remove(child)
    self.histBox.pack_start(self.canvas, True, True, 0)

    # Calculate derivatives.
    # Potentially use them for detecting turning points for colour changes.
    # Not being plotted at this stage.
    firstDeriv = [0 for i in range(self.chaos.maxIterations)]
    for i in range(1, (self.chaos.maxIterations - 1)):
        firstDeriv[i - 1] = self.chaos.hist[i] - self.chaos.hist[i - 1]

    # Put divergence iterations into bins for histogram plot.
    self.doHistogramBins()

    # Option to not include max iterations in histogram.
    # Depending on the image max iterations can swamp the histogram.
    # Also option to plot as bar graph or as line plot instead.
    if self.chaos.incMaxIterations == True:
        if self.chaos.histLinePlot == True:
            plt.plot(self.chaos.bins, self.chaos.hist, color='blue',
                     linewidth=1, marker='o', markersize=2)
        else:
            plt.bar(self.chaos.bins, self.chaos.hist, color='blue')
    else:
        if self.chaos.histLinePlot == True:
            plt.plot(self.chaos.bins[:-1], self.chaos.hist[:-1], color='blue',
                     linewidth=1, marker='o', markersize=2)
        else:
            plt.bar(self.chaos.bins[:-1], self.chaos.hist[:-1], color='blue')

    plt.xlabel('Iteration on Divergence')
    plt.ylabel('Frequency')
    plt.title('Histogram of Divergence Iterations')

    # Option to use log scale for iteration count axis (y).
    # Depending on the plot can make it easier to read.
    if self.chaos.logItsCounts == True:
        plt.yscale('log')

    plt.minorticks_on()
    plt.tick_params(which='major', length=8, width=2, direction='out')
    plt.tick_params(which='minor', length=4, width=2, direction='out')

    # Show the histogram dialog.
    self.winHistogram.show_all()

    # Histogram present flag set.
    self.chaos.histogramPresent = True
def Optimize(Item, Mode): global Global_Power, Profile_List, Eff_Turbine, Eff_Gearbox, Eff_Generator, Startup_Cost, Payback_Time if Item == "blade_size": Power = [] Diameters = np.arange(1,12.5,0.5) Profile_List = [] print("Calculating blade diameter optimization") if Mode == "single": Setup_Profile([[1,12],[0,5],[2,0]]) elif Mode == "double": Setup_Profile([[3,14],[0,5],[2,0],[4,7]]) for Turbine in np.arange(1,21,1): Power.append([]) for Diameter in np.arange(1,12.5,0.5): Total_Mechanical_Energy = 0 Run_Simulation(step=100, tidal_function="Newport_1", turbines=Turbine, diameter=Diameter, slucies=0, sluice_size=80, profile=1, time=60*60*24*365, econ=False, output=False, graphs=False, graph_head=False, graph_QV=False, graph_P=False) Cut_Count = 0 for Energy in Global_Power: Total_Mechanical_Energy += Energy*100 Power[Turbine-1].append(Total_Mechanical_Energy) plt.figure(figsize=plt.figaspect(1)*2) ax = plt.axes() plt.title("Energy Output Vs Blade Diameter For Different Number of Turbines In Single Effect Mode (Newport Tidal Data)") ax.set_xlabel("Blade Diameter (m)") ax.set_ylabel("Energy Output (J)") Count = 0 for i in Power: Temp = ax.plot(Diameters, Power[Count], label=("Turbines: " + str(Count+1)), linewidth=2) Count += 1 #ax.legend("1 turbine","2 turbines","3 turbines","4 turbines","5 turbines","6 turbines","7 turbines","8 turbines","9 turbines","10 turbines","11 turbines","12 turbines","13 turbinse","14 turbines","15 turbines","16 turbines","17 turbines","18 turbines","19 turbines","20 turbines",) ax.legend() plt.minorticks_on() ax.grid(which='major', color='black', linestyle='-', linewidth=1) ax.grid(which='minor', color='black', linestyle='--', linewidth=0.5) elif Item == "turbine_number": Power = [] Turbines = np.arange(1,31,1) Diameters = np.arange(1,12.5,0.5) Profile_List = [] print("Calculating turbine number optimization") if Mode == "single": Setup_Profile([[1,12],[0,5],[2,0]]) elif Mode == "double": Setup_Profile([[3,14],[0,5],[2,0],[4,7]]) Count = 0 for Diameter in np.arange(1,12.5,0.5): Power.append([]) Count += 1 for Turbine in np.arange(1,31,1): Total_Mechanical_Energy = 0 Run_Simulation(step=100, tidal_function="Newport_1", turbines=Turbine, diameter=Diameter, slucies=0, sluice_size=80, profile=1, time=60*60*24*365, econ=False, output=False, graphs=False, graph_head=False, graph_QV=False, graph_P=False) for Energy in Global_Power: #Global_Power_Elec.append(i*Eff_Turbine*Eff_Gearbox*Eff_Generator) Total_Mechanical_Energy += Energy*100 Power[Count-1].append(Total_Mechanical_Energy) plt.figure(figsize=plt.figaspect(1)*2) ax = plt.axes() if Mode == "single": plt.title("Energy Output Vs Number of Turbines For Different Blade Diameters In Single Effect Mode (Newport Tidal Data)") elif Mode == "double": plt.title("Energy Output Vs Number of Turbines For Different Blade Diameters In Double Effect Mode (Newport Tidal Data)") ax.set_xlabel("Number of Turbines") ax.set_ylabel("Energy Output (J)") Count = 0 for i in Power: Temp = ax.plot(Turbines, Power[Count], label=("Diameter: " + str(Diameters[Count])), linewidth=2) Count += 1 #ax.legend("1 turbine","2 turbines","3 turbines","4 turbines","5 turbines","6 turbines","7 turbines","8 turbines","9 turbines","10 turbines","11 turbines","12 turbines","13 turbinse","14 turbines","15 turbines","16 turbines","17 turbines","18 turbines","19 turbines","20 turbines",) ax.legend() plt.minorticks_on() ax.grid(which='major', color='black', linestyle='-', linewidth=1) ax.grid(which='minor', color='black', linestyle='--', linewidth=0.5) elif Item 
== "algorithm": Profile_List = [] Power = [] Triggers = np.arange(13,-1,-0.5) for i in range(len(Triggers)): Triggers[i] = (Triggers[i]/13)*100 Count = 1 Turbine_Count = 0 for Turbines in np.arange(1,26,1): Power.append([]) Profile_List = [] Turbine_Count += 1 Count = 1 for i in np.arange(13,-1,-0.5): Total_Mechanical_Energy = 0 Setup_Profile([[3,14],[0,i],[2,0],[4,0]]) Run_Simulation(step=100, tidal_function="sine", turbines=Turbine_Count, diameter=5.87, slucies=0, sluice_size=80, profile=Count, time=150000, econ=False, output=False, graphs=False, graph_head=False, graph_QV=False, graph_P=False) for Energy in Global_Power: #Global_Power_Elec.append(i*Eff_Turbine*Eff_Gearbox*Eff_Generator) Total_Mechanical_Energy += Energy Power[Turbine_Count-1].append(Total_Mechanical_Energy) Count += 1 plt.figure(figsize=plt.figaspect(1)*2) ax = plt.axes() plt.title("Energy Output Vs Sluice Gate Triggering Height (Double Effect)") ax.set_xlabel("Lagoon Triggering Height as (Percentage of Tidal Range)") ax.set_ylabel("Energy Output (J)") Count = 0 for i in Power: Temp = ax.plot(Triggers, Power[Count], label=("Turbines: " + str(Count+1)), linewidth=2) Count += 1 ax.legend() plt.minorticks_on() ax.grid(which='major', color='black', linestyle='-', linewidth=1) ax.grid(which='minor', color='black', linestyle='--', linewidth=0.5) elif Item == "power": Power = [[],[]] Per_Turbine = [] Turbines = np.arange(1,41,1) for Turbine in np.arange(1,41,1): Run_Simulation(step=100, tidal_function="Newport_1", turbines=Turbine, diameter=5.87, slucies=0, sluice_size=80, profile=1, time=60*60*24*365, econ=False, output=False, graphs=False, graph_head=False, graph_QV=False, graph_P=False) Max_Mechanical_Power = max(Global_Power) Max_Electrical_Power = (Max_Mechanical_Power*Eff_Gearbox*Eff_Generator) Power[0].append(Max_Mechanical_Power) Power[1].append(Max_Electrical_Power) Per_Turbine.append(Max_Electrical_Power/Turbine) plt.figure(figsize=plt.figaspect(1)*2) ax = plt.axes() plt.title("Max Power Output Vs Number of Turbines") ax.set_xlabel("Number of Turbines") ax.set_ylabel("Max Power Output (W)") Temp = ax.plot(Turbines, Power[0], label=("Mechanical Power"), color="gold", linewidth=2) Temp2 = ax.plot(Turbines, Power[1], "--", label=("Electrical Power"), color="y", linewidth=2) Temp2 = ax.plot(Turbines, Per_Turbine, "--", label=("Electrical Power Per Turbine"), color="red", linewidth=2) ax.legend() plt.minorticks_on() ax.grid(which='major', color='black', linestyle='-', linewidth=1) ax.grid(which='minor', color='black', linestyle='--', linewidth=0.5) elif Item == "payback": Profile_List = [] Turbines = np.arange(3,41,1) Startup_Costs = [] Payback_Times = [] if Mode == "single": Setup_Profile([[1,12],[0,5],[2,0]]) elif Mode == "double": Setup_Profile([[3,14],[0,5],[2,0],[4,7]]) for Turbine in Turbines: Run_Simulation(step=100, tidal_function="Newport_1", turbines=Turbine, diameter=5.87, slucies=0, sluice_size=80, profile=1, time=60*60*24*365, econ=True, output=False, graphs=False, graph_head=False, graph_QV=False, graph_P=False) Startup_Costs.append(Startup_Cost) Payback_Times.append(Payback_Time) plt.figure(figsize=plt.figaspect(1)*2) ax = plt.axes() plt.title("Number of Turbines Vs Payback Time") ax.set_xlabel("Number of Turbines") ax.set_ylabel("Payback Time (Years)") Temp = ax.plot(Turbines, Payback_Times, label=("Diameter: 5.87m"), color="blue", linewidth=2) ax.legend() plt.minorticks_on() ax.grid(which='major', color='black', linestyle='-', linewidth=1) ax.grid(which='minor', color='black', linestyle='--', 
linewidth=0.5)
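A typical driver for Optimize, assuming the module-level globals (Profile_List, the efficiency constants, and so on) have been initialised; this is a usage sketch, not part of the original module:

if __name__ == "__main__":
    Optimize("blade_size", "single")      # sweep blade diameter for 1-20 turbines
    Optimize("turbine_number", "double")  # sweep turbine count per blade diameter
    plt.show()                            # Optimize builds figures but does not show them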
def getAlpha(telluric_data,lsf,continuum=True,test=False,save_path=None): """ Return a best alpha value from a telluric data. """ alpha_list = [] test_alpha = np.arange(0.1,7,0.1) data = copy.deepcopy(telluric_data) if continuum is True: data = nsp.continuumTelluric(data=data, order=data.order) for i in test_alpha: telluric_model = nsp.convolveTelluric(lsf,data, alpha=i) #telluric_model.flux **= i if data.order == 59: # mask hydrogen absorption feature data2 = copy.deepcopy(data) tell_mdl = copy.deepcopy(telluric_model) mask_pixel = 450 data2.wave = data2.wave[mask_pixel:] data2.flux = data2.flux[mask_pixel:] data2.noise = data2.noise[mask_pixel:] tell_mdl.wave = tell_mdl.wave[mask_pixel:] tell_mdl.flux = tell_mdl.flux[mask_pixel:] chisquare = nsp.chisquare(data2,tell_mdl) else: chisquare = nsp.chisquare(data,telluric_model) alpha_list.append([chisquare,i]) if test is True: plt.plot(telluric_model.wave,telluric_model.flux+i*10, 'k-',alpha=0.5) if test is True: plt.plot(telluric_data.wave,telluric_data.flux, 'r-',alpha=0.5) plt.rc('font', family='sans-serif') plt.title("Test Alpha",fontsize=15) plt.xlabel("Wavelength ($\AA$)",fontsize=12) plt.ylabel("Transmission + Offset",fontsize=12) plt.minorticks_on() if save_path is not None: plt.savefig(save_path+\ "/{}_O{}_alpha_data_mdl.png"\ .format(telluric_data.name, telluric_data.order)) plt.show() plt.close() fig, ax = plt.subplots() plt.rc('font', family='sans-serif') for i in range(len(alpha_list)): ax.plot(alpha_list[i][1],alpha_list[i][0],'k.',alpha=0.5) ax.plot(min(alpha_list)[1],min(alpha_list)[0],'r.', label="best alpha {}".format(min(alpha_list)[1])) ax.set_xlabel(r"$\alpha$",fontsize=12) ax.set_ylabel("$\chi^2$",fontsize=12) plt.minorticks_on() plt.legend(fontsize=10) if save_path is not None: plt.savefig(save_path+\ "/{}_O{}_alpha_chi2.png"\ .format(telluric_data.name, telluric_data.order)) plt.show() plt.close() alpha = min(alpha_list)[1] return alpha
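nsp.chisquare is not shown here; a minimal sketch of the statistic getAlpha appears to rely on, assuming data and model expose flux arrays on a common wavelength grid and data carries per-pixel uncertainties in noise:

import numpy as np

def chisquare(data, model):
    # Noise-weighted chi-square between observed and model spectra (sketch).
    return np.sum((data.flux - model.flux) ** 2 / data.noise ** 2)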
def plot(args):
    """Logging format:
    Epoch 0 Update 52 Cost 219.29276 G2 1483.47644 UD 2.78200 Time 127.66500 s
    """
    colors = ['c', 'r', 'y', 'k', 'b', 'g']
    for f_idx, filename in enumerate(args.filenames):
        with open(filename, 'r') as f:
            iterations = []
            costs = []
            valid_iterations = []
            valid_costs = []
            small_train_costs = []
            for line in f:
                if line.startswith('Epoch'):
                    words = line.split()
                    iterations.append(int(words[3]))
                    costs.append(float(words[5]))
                elif line.startswith('Valid'):
                    words = line.split()
                    valid_iterations.append(iterations[-1] if iterations else 0)
                    # Parse costs as floats; keeping them as strings would
                    # break the y-limits and curve scaling below.
                    valid_costs.append(float(words[2]))
                    small_train_costs.append(float(words[6]))
        avg_costs = [
            average(costs[max(0, i - args.average):i])
            for i in range(len(costs))
        ]
        # Thin both series to the requested plotting interval.
        iterations = iterations[::args.interval]
        avg_costs = avg_costs[::args.interval]
        if args.train:
            plt.plot(iterations, avg_costs, '{}-'.format(colors[f_idx]),
                     label='{}_train'.format(filename))
        if args.valid:
            plt.plot(valid_iterations, valid_costs, '{}--'.format(colors[f_idx]),
                     label='{}_valid'.format(filename))
        if args.small_train:
            plt.plot(valid_iterations, small_train_costs, '{}-.'.format(colors[f_idx]),
                     label='{}_small_train'.format(filename))
    plt.xlim(xmin=args.xmin, xmax=args.xmax)
    plt.ylim(ymin=args.ymin, ymax=args.ymax)
    plt.minorticks_on()
    plt.title('Costs')
    plt.legend(loc='upper right')
    plt.grid(which='both')
    plt.show()
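average() is called above but not defined in this snippet; a minimal version consistent with how it is used on (possibly empty) slices of the cost list:

def average(values):
    # Arithmetic mean, returning 0.0 for the empty leading window.
    return sum(values) / len(values) if values else 0.0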
def Run_Simulation(**kwargs): #Parameters passed: Step_Size = kwargs["step"] Tidal_Function = kwargs["tidal_function"] Turbines = kwargs["turbines"] Turbine_Diameter = kwargs["diameter"] Sluices = kwargs["slucies"] Sluice_Size = kwargs["sluice_size"] Profile_Number = kwargs["profile"] Run_Time = kwargs["time"] Econ = kwargs["econ"] Output = kwargs["output"] Graphs = kwargs["graphs"] Graph_Head = kwargs["graph_head"] Graph_QV = kwargs["graph_QV"] Graph_P = kwargs["graph_P"] #Global Variables global Civil_Price, Blade_Price, Turbine_Price, Gearbox_Price, Generator_Price, Sluice_Price, Startup_Cost, Payback_Time global Eff_Turbine, Eff_Gearbox, Eff_Generator global Global_Time, Global_Volume, Global_Tide, Global_Head, Global_Head_Difference, Global_Velocity, Global_Discharge, Global_Head_Loss, Global_Power, Global_Power_Elec Global_Time = [0] Global_Volume = [0] Global_Tide = [0] Global_Head = [0] Global_Head_Difference = [0] Global_Velocity = [0] Global_Discharge = [0] Global_Head_Loss = [0] Global_Power = [0] Global_Power_Elec = [] #Local Variables V_0 = 18891820 Euler_Volume = [0] Euler_Volume_Tide = [0] Euler_Volume[0] = V_0 Euler_Volume_Tide[0] = V_0 Time = [0] M = 1453217 G = 9.807 rho = 1029 Discharge_Coefficient= 0.8 Sluicing_Discharge_Coefficient = 0.98 Friction_Factor = 0.00035 Draft_Length = 10 Draft_Diameter = 10 State = 0 #0 = Waiting, 1 = filling sluice, 2 = draining generation, 3 = filling generation, 4 = draining sluice Current_Time = Step_Size Operational_Profile = Profile_List[Profile_Number-1] Profile_Stage = 0 Total_Stages = len(Operational_Profile) Total_Mechanical_Energy = 0 Total_Electrical_Energy = 0 #Initial Calculations Area = np.pi*np.power((Turbine_Diameter/4),2)*Turbines Sluice_Area = (np.pi*np.power((Turbine_Diameter/4),2)*Turbines)+(Sluice_Size*Sluices) ''' Read operational algorithm profile, then switch between 5 different states, checking stopping conditions each time. Put Euler approximation in different function Operational states: -Filling_Sluice -Filling_Generation -Draining_Sluice -Draining_Generation -Transition (waiting) Save data to global arrays Plot graph using given parameters If value in square root turns negative, return an error. 
(This should only happen if sluice gates are not shut at the right time, it means the tide is higher than the lagoon) ''' if Output == True: log.setLevel(logging.INFO) else: log.setLevel(logging.WARNING) print("\nRunning simulation...\n") State = Operational_Profile[0][0] Average_Head = 0 AH_Count = 0 Runtime = 0 Flow_Rate_Average = 0 Max_Head = [] while Current_Time < Run_Time: if State == 0: log.info("In state 0: Waiting for tidal shift") while State == 0: if Current_Time > Run_Time: State = 5 log.info("Runtime elapsed") elif Global_Tide[-1] < Operational_Profile[Profile_Stage][1]: Profile_Stage = (Profile_Stage+1)%Total_Stages State = Operational_Profile[Profile_Stage][0] else: Global_Time.append(Current_Time) Global_Tide.append(Evaluate_Tidal_Function(Tidal_Function, Current_Time)) Global_Volume.append(Global_Volume[-1]) Global_Head.append((Global_Volume[-1])/(M)) Global_Head_Difference.append(Global_Head[-1]-Global_Tide[-1]) Global_Velocity.append(0) Global_Discharge.append(0) Global_Head_Loss.append(0) Global_Power.append(0) Current_Time += Step_Size if State == 1: log.info("In state 1: Filling lagoon via sluicing") while State == 1: if Current_Time > Run_Time: State = 5 log.info("Runtime elapsed") elif Global_Head[-1] > Operational_Profile[Profile_Stage][1]: Profile_Stage = (Profile_Stage+1)%Total_Stages State = Operational_Profile[Profile_Stage][0] else: Global_Time.append(Current_Time) Global_Tide.append(Evaluate_Tidal_Function(Tidal_Function, Current_Time)) #Global_Volume.append(Global_Volume[Current_Time-1]+Step_Size*Sluice_Area*Sluicing_Discharge_Coefficient*np.sqrt(2*G)*np.sqrt(Global_Tide[Current_Time]-((Global_Volume[Current_Time-1])/(M))-Pipe_Loss)) if (Global_Tide[-1])-((Global_Volume[-1])/(M)) < 0: log.info("WARNING: Target value of " + str(Operational_Profile[Profile_Stage][1]) + "m in state 1 could not be met!") Global_Volume.append(Global_Volume[-1]) Profile_Stage = (Profile_Stage+1)%Total_Stages State = Operational_Profile[Profile_Stage][0] Global_Velocity.append(Global_Velocity[-1]) Global_Discharge.append(Global_Discharge[-1]) else: #Global_Volume.append(Global_Volume[-1]+Step_Size*Sluice_Area*Sluicing_Discharge_Coefficient*np.sqrt(2*G)*np.sqrt((Global_Tide[-1])-((Global_Volume[-1])/(M))-Pipe_Loss)) #Global_Velocity.append(np.sqrt(2*G)*np.sqrt((Global_Tide[-1])-((Global_Volume[-1])/(M))-Pipe_Loss)) Global_Velocity.append(np.sqrt(2*G*((Global_Tide[-1])-((Global_Volume[-1])/(M)))/(1+Friction_Factor*(Draft_Length/Draft_Diameter)))) Global_Volume.append(Global_Volume[-1]+Step_Size*Sluice_Area*Sluicing_Discharge_Coefficient*Global_Velocity[-1]) Global_Discharge.append(Global_Velocity[-1]*Sluice_Area*Sluicing_Discharge_Coefficient) Global_Head.append((Global_Volume[-1])/(M)) Global_Head_Difference.append(Global_Head[-1]-Global_Tide[-1]) Global_Head_Loss.append(0) Global_Power.append(0) Current_Time += Step_Size #print("the time is: " + str(Current_Time)) if State == 2: log.info("In state 2: Draining lagoon via energy generation") while State == 2: if Current_Time > Run_Time: State = 5 log.info("Runtime elapsed") elif Global_Head[-1] < Operational_Profile[Profile_Stage][1]: Profile_Stage = (Profile_Stage+1)%Total_Stages State = Operational_Profile[Profile_Stage][0] #Current_Time = Run_Time+1 else: Global_Time.append(Current_Time) Global_Tide.append(Evaluate_Tidal_Function(Tidal_Function, Current_Time)) if (Global_Volume[-1])/(M)-(Global_Tide[-1]) < 0: log.info("WARNING: Target value of " + str(Operational_Profile[Profile_Stage][1]) + "m in state 2 could not be met!") 
Global_Volume.append(Global_Volume[-1]) Profile_Stage = (Profile_Stage+1)%Total_Stages State = Operational_Profile[Profile_Stage][0] Global_Velocity.append(Global_Velocity[-1]) Global_Discharge.append(Global_Discharge[-1]) Global_Head_Loss.append(Global_Head_Loss[-1]) Global_Power.append(Global_Power[-1]) else: #Global_Volume.append(Global_Volume[-1]-Step_Size*Area*Discharge_Coefficient*np.sqrt(2*G)*np.sqrt((Global_Volume[-1])/(M)-(Global_Tide[-1])-Turbine_Loss-Pipe_Loss)) #Global_Velocity.append(np.sqrt(2*G)*np.sqrt((Global_Volume[-1])/(M)-(Global_Tide[-1])-Turbine_Loss-Pipe_Loss)) Global_Velocity.append(np.sqrt((2*G*((Global_Volume[-1])/(M)-(Global_Tide[-1]))*(1-Eff_Turbine))/(1-Eff_Turbine*Friction_Factor*(Draft_Length/Draft_Diameter)))) Global_Volume.append(Global_Volume[-1]-Step_Size*Area*Discharge_Coefficient*Global_Velocity[-1]) Global_Discharge.append(Global_Velocity[-1]*Area*Discharge_Coefficient) Global_Head_Loss.append(((Global_Volume[-1])/(M)-Global_Tide[-1]-(Friction_Factor*(Draft_Length/Draft_Diameter)*(Global_Velocity[-1]**2/(2*G))))) Global_Power.append(rho*G*Global_Discharge[-1]*Global_Head_Loss[-1]) Global_Head.append((Global_Volume[-1])/(M)) Global_Head_Difference.append(Global_Head[-1]-Global_Tide[-1]) Current_Time += Step_Size Average_Head += abs(Global_Head_Difference[-1]) Max_Head.append(abs(Global_Head_Difference[-1])) AH_Count += 1 if Current_Time > 50000: Runtime += 1*Step_Size Flow_Rate_Average += Global_Discharge[-1] if State == 3: log.info("In state 3: Filling lagoon via energy generation") while State == 3: if Current_Time > Run_Time: State = 5 log.info("Runtime elapsed") elif Global_Head[-1] > Operational_Profile[Profile_Stage][1]: Profile_Stage = (Profile_Stage+1)%Total_Stages State = Operational_Profile[Profile_Stage][0] else: Global_Time.append(Current_Time) Global_Tide.append(Evaluate_Tidal_Function(Tidal_Function, Current_Time)) #Global_Volume.append(Global_Volume[Current_Time-1]+Step_Size*Sluice_Area*Sluicing_Discharge_Coefficient*np.sqrt(2*G)*np.sqrt(Global_Tide[Current_Time]-((Global_Volume[Current_Time-1])/(M))-Pipe_Loss)) if (Global_Tide[-1])-((Global_Volume[-1])/(M)) < 0: log.info("WARNING: Target value of " + str(Operational_Profile[Profile_Stage][1]) + "m in state 1 could not be met!") Global_Volume.append(Global_Volume[-1]) Profile_Stage = (Profile_Stage+1)%Total_Stages State = Operational_Profile[Profile_Stage][0] Global_Velocity.append(Global_Velocity[-1]) Global_Discharge.append(Global_Discharge[-1]) Global_Head_Loss.append(Global_Head_Loss[-1]) Global_Power.append(Global_Power[-1]) else: #Global_Volume.append(Global_Volume[-1]+Step_Size*Sluice_Area*Sluicing_Discharge_Coefficient*np.sqrt(2*G)*np.sqrt((Global_Tide[-1])-((Global_Volume[-1])/(M))-Pipe_Loss)) #Global_Velocity.append(np.sqrt(2*G)*np.sqrt((Global_Tide[-1])-((Global_Volume[-1])/(M))-Pipe_Loss)) Global_Velocity.append(np.sqrt((2*G*((Global_Tide[-1])-(Global_Volume[-1])/(M))*(1-Eff_Turbine))/(1-Eff_Turbine*Friction_Factor*(Draft_Length/Draft_Diameter)))) Global_Volume.append(Global_Volume[-1]+Step_Size*Area*Discharge_Coefficient*Global_Velocity[-1]) Global_Discharge.append(Global_Velocity[-1]*Area*Discharge_Coefficient) Global_Head_Loss.append(Global_Tide[-1]-((Global_Volume[-1])/(M))-(Friction_Factor*(Draft_Length/Draft_Diameter)*(Global_Velocity[-1]**2/(2*G)))) Global_Power.append(rho*G*Global_Discharge[-1]*Global_Head_Loss[-1]) Global_Head.append((Global_Volume[-1])/(M)) Global_Head_Difference.append(Global_Head[-1]-Global_Tide[-1]) Current_Time += Step_Size Average_Head += 
abs(Global_Head_Difference[-1]) Max_Head.append(abs(Global_Head_Difference[-1])) AH_Count += 1 if Current_Time > 50000: Runtime += 1*Step_Size Flow_Rate_Average += Global_Discharge[-1] if State == 4: log.info("In state 4: Waiting for tidal shift") while State == 4: if Current_Time > Run_Time: State = 5 log.info("Runtime elapsed") elif Global_Tide[-1] > Operational_Profile[Profile_Stage][1]: Profile_Stage = (Profile_Stage+1)%Total_Stages State = Operational_Profile[Profile_Stage][0] else: Global_Time.append(Current_Time) Global_Tide.append(Evaluate_Tidal_Function(Tidal_Function, Current_Time)) Global_Volume.append(Global_Volume[-1]) Global_Head.append((Global_Volume[-1])/(M)) Global_Head_Difference.append(Global_Head[-1]-Global_Tide[-1]) Global_Velocity.append(0) Global_Discharge.append(0) Global_Head_Loss.append(0) Global_Power.append(0) Current_Time += Step_Size log.info("Current time: " + str(Current_Time)) Average_Head = (Average_Head/AH_Count) Flow_Rate_Average = (Flow_Rate_Average/AH_Count) #50000 start offset, run for 86400 print("\nSimulation complete\n") print("Average head difference across turbines (m): " + str(Average_Head)) print("Max head difference across turbines (m): " + str(max(Max_Head))) print("Run time (s): " + str(Runtime)) print("Average discharge (m^3s^-1): " + str(Flow_Rate_Average)) #Energy generation calculations print("\n================================================================") print("Running energy calculations...") for Power in Global_Power: if np.isnan(Power) == True: Power = 0 Global_Power_Elec.append(Power*Eff_Gearbox*Eff_Generator) Total_Mechanical_Energy += int(Power*Step_Size) Total_Electrical_Energy += int(Global_Power_Elec[-1]*Step_Size) Energy_Lost = Total_Mechanical_Energy-Total_Electrical_Energy Efficiency = (Eff_Turbine*Eff_Gearbox*Eff_Generator) print("Total mechanical energy generated (J): " + str(Total_Mechanical_Energy)) print("Energy lost (J): " + str(Energy_Lost)) print("System efficiency: " + str(Efficiency)) print("Total electrical energy generated (J): " + str(Total_Electrical_Energy)) print("Total electrical energy generated (kWh): " + str(Total_Electrical_Energy/3.6e+6)) #Print peak power and total energy #Print final power and energy and percentage lost if Econ == True: #Section for calculating economic assessment. 
print("\n================================================================") print("Running economic assessment of configuration...") Startup_Cost = Civil_Price+(Turbines*(Blade_Price+Shaft_Price+Turbine_Price+Gearbox_Price+(2*Sluice_Price)+Generator_Price+Electrical_Price))+(Sluices*Sluice_Price) Running_Costs_Day = 823 #a day Total_Running_Costs = Running_Costs_Day*(Run_Time/86400) Energy_Price = 92.5/1000 #per kWh Turnover = (Total_Electrical_Energy/(3.6e+6))*Energy_Price Gross_Profit = Turnover-Total_Running_Costs Net_Profit = Gross_Profit-Startup_Cost Payback_Time = Startup_Cost/Gross_Profit print("Startup costs: " + str(Startup_Cost)) print("Running costs: " + str(Total_Running_Costs)) print("Turnover: " + str(Turnover)) print("Gross profit: " + str(Gross_Profit)) print("Net profit: " + str(Net_Profit)) print("Pay back time in years: " + str(Payback_Time)) if Graphs == True: plt.figure(figsize=plt.figaspect(1)*2) ax = plt.axes() plt.title("Break-Even Analysis") ax.set_xlabel("Time (Years)") ax.set_ylabel("Equity (£)") Fixed_Costs_Plot = ax.plot([0,Payback_Time,2*Payback_Time], [Startup_Cost, Startup_Cost, Startup_Cost], "--", label="Fixed costs", color="red", linewidth=3) Total_Costs_Plot = ax.plot([0,Payback_Time,2*Payback_Time], [Startup_Cost, (Total_Running_Costs*Payback_Time + Startup_Cost), (Total_Running_Costs*Payback_Time*2 + Startup_Cost)], "--", label="Total costs", color="darkred", linewidth=3) Revenue_Plot = ax.plot([0,Payback_Time,2*Payback_Time], [0, (Turnover*Payback_Time), (Turnover*Payback_Time*2)], label="Revenue", color="blue", linewidth=3) Lines = Fixed_Costs_Plot+Total_Costs_Plot+Revenue_Plot Labels =[l.get_label() for l in Lines] ax.legend(Lines, Labels) plt.minorticks_on() ax.grid(which='major', color='black', linestyle='-', linewidth=1) ax.grid(which='minor', color='black', linestyle='--', linewidth=0.5) #Take into account maintencance downtimes if Graphs == True: plt.figure(figsize=plt.figaspect(1)*2) ax = plt.axes() second_ax = ax.twinx() plt.title("Lagoon Volume, Lagoon Height and Tide Height Vs Time (Double Effect)") ax.set_xlabel("Time (s)") ax.set_ylabel("Volume (m^3)") second_ax.set_ylabel('Height (m)') Filling_Plot = ax.plot(Global_Time, Global_Volume, label="Lagoon Volume", color="deepskyblue", linewidth=3) Lagoon_Head_Plot = second_ax.plot(Global_Time, Global_Head, "--", label="Lagoon height", color="green", linewidth=2) Tide_Height_Plot = second_ax.plot(Global_Time, Global_Tide, "--", label="Tide height", color ="blue", linewidth=2) # plt.title("Lagoon Volume, Lagoon Height and Tide Height Vs Time (Double Effect)", fontsize='30', weight='bold') # ax.set_xlabel("Time (s)", fontsize='30', weight='bold') # ax.set_ylabel('Height (m)', fontsize='30', weight='bold') # Lagoon_Head_Plot = ax.plot(Global_Time, Global_Head, "--", label="Lagoon height", color="green", linewidth=2) # Tide_Height_Plot = ax.plot(Global_Time, Global_Tide, "--", label="Tide height", color ="blue", linewidth=2) # ax.set_ylabel('Height (m)') if Graph_Head == True: Head_Difference_Plot = second_ax.plot(Global_Time, Global_Head_Difference, "--", label="Head difference", color ="orange", linewidth=2) Lines = Filling_Plot+Lagoon_Head_Plot+Tide_Height_Plot+Head_Difference_Plot else: Lines = Filling_Plot+Lagoon_Head_Plot+Tide_Height_Plot #Lines = Lagoon_Head_Plot+Tide_Height_Plot Labels =[l.get_label() for l in Lines] ax.legend(Lines, Labels) #ax.legend(Lines, Labels, fontsize='30') plt.minorticks_on() ax.grid(which='major', color='black', linestyle='-', linewidth=1) ax.grid(which='minor', 
color='black', linestyle='--', linewidth=0.5) ax.set_ylim(0,2e7) # for tick in ax.xaxis.get_major_ticks(): # tick.label.set_fontsize(30) # tick.label.set_weight('bold') # # for tick in ax.yaxis.get_major_ticks(): # tick.label.set_fontsize(30) # tick.label.set_weight('bold') if Graph_QV == True: #Red graphs plt.figure(figsize=plt.figaspect(1)*2) QVax = plt.axes() second_QVax = QVax.twinx() plt.title("Velocity & Discharge Vs Time") QVax.set_xlabel("Time (s)") QVax.set_ylabel("Velocity (ms^-1)") second_QVax.set_ylabel('Discharge (m^3s^-1)') Velocity_Plot = QVax.plot(Global_Time, Global_Velocity, label="Flow velocity", color="red", linewidth=2) Discharge_Plot = second_QVax.plot(Global_Time, Global_Discharge, "--", label="Total discharge", color="maroon", linewidth=2) Lines = Velocity_Plot+Discharge_Plot Labels =[l.get_label() for l in Lines] QVax.legend(Lines, Labels) plt.minorticks_on() QVax.grid(which='major', color='black', linestyle='-', linewidth=1) QVax.grid(which='minor', color='black', linestyle='--', linewidth=0.5) if Graph_P == True: plt.figure(figsize=plt.figaspect(1)*2) Pax = plt.axes() plt.title("Mechanical & Electrical Power Vs Time") Pax.set_xlabel("Time (s)") Pax.set_ylabel("Power (w)") Mech_Power_Plot = Pax.plot(Global_Time, Global_Power, label="Mechanical Power", color="gold", linewidth=2) Elec_Power_Plot = Pax.plot(Global_Time, Global_Power_Elec, "--", label="Electrical Power", color="y", linewidth=2) Lines = Mech_Power_Plot+Elec_Power_Plot Labels =[l.get_label() for l in Lines] Pax.legend(Lines, Labels) plt.minorticks_on() Pax.grid(which='major', color='black', linestyle='-', linewidth=1) Pax.grid(which='minor', color='black', linestyle='--', linewidth=0.5)
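An example standalone run of Run_Simulation, assuming Setup_Profile registers the profile in Profile_List and the price/efficiency globals are initialised elsewhere in the module. Profile stages are (state, target) pairs using the state codes documented above (0 waiting, 1/4 sluice fill/drain, 2/3 generation drain/fill); note the "slucies" spelling, which Run_Simulation expects:

Setup_Profile([[1, 12], [0, 5], [2, 0]])  # single-effect operation
Run_Simulation(step=100, tidal_function="Newport_1", turbines=10,
               diameter=5.87, slucies=0, sluice_size=80, profile=1,
               time=60 * 60 * 24, econ=True, output=True, graphs=True,
               graph_head=True, graph_QV=False, graph_P=True)
plt.show()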
def main(): assert_banknote_threshold(0) # parse args save = False arg_error = False if len(sys.argv) > 2: arg_error = True if len(sys.argv) == 2: if sys.argv[1] not in ["-s", "--save"]: arg_error = True else: save = True if arg_error: print_help(os.path.basename(sys.argv[0])) sys.exit(1) # get all directories (classes) in the test directory (dirpath, dirnames, _) = next(os.walk(os.path.join(TEST_DIR))) errors = [] # true positives true_positive_confidences = [] for dirname in dirnames: # ignore 'bg' class if dirname == 'bg': continue # for each directory (class), run the inference on all images images = load_base64(dirname, os.path.join(dirpath, dirname)) for (response_json, _) in predict(images): if response_json["status"] == "error": errors.append(f"{dirname}: {response_json['error_message']}") continue if response_json["status"] == "ok": if response_json["response"] == dirname: true_positive_confidences.append( response_json["confidence"] ) # false positives false_positive_confidences = [] images = load_base64('bg', os.path.join(dirpath, 'bg')) for (response_json, _) in predict(images): if response_json["status"] == "error": errors.append(f"bg: {response_json['error_message']}") continue if response_json["status"] == "ok": false_positive_confidences.append(response_json["confidence"]) if errors: print("\nErrors:") for error in errors: print(error) if save: print("\nSaving confidences...") with open(os.path.join(os.path.dirname(__file__), "output/tp.json"), "w") as out_file: out_file.write(json.dumps(true_positive_confidences)) with open(os.path.join(os.path.dirname(__file__), "output/fp.json"), "w") as out_file: out_file.write(json.dumps(false_positive_confidences)) print("\nPlotting results...") plt.boxplot([true_positive_confidences, false_positive_confidences], widths=0.7) plt.xticks([1, 2], ["True positives", "False positives"]) plt.minorticks_on() plt.grid(which='major', axis='y') plt.grid(which='minor', axis='y', linestyle=':', alpha=0.3) plt.ylim(0, 1) plt.ylabel("Confidence level") plt.title('Confidence statistics') plt.show()
    label=r'$\mathcal{A}(\ell=%1.1f nm)=%3.2f, \,\,\, \mathcal{A}(\ell=%1.1f nm)=%3.2f$'
    % (1e9 * Ls[0], 1e21 * sum_A[0], 1e9 * Ls[1], 1e21 * sum_A[1]))
pl.xlabel(x_ax)
pl.ylabel(y_ax_par)
pl.title(title('6', '5', 'parallel'))
pl.legend(loc='best')
pl.savefig(svfig('65pk', '65', 'loglog_parallel'))
pl.show()

# Semilog
pl.figure()
pl.semilogy(
    1e9 * Ls, 1e21 * sum_A,
    label=r'$\mathcal{A}(\ell=%1.1f nm)=%3.2f, \,\,\, \mathcal{A}(\ell=%1.1f nm)=%3.2f$'
    % (1e9 * Ls[0], 1e21 * sum_A[0], 1e9 * Ls[1], 1e21 * sum_A[1]))
pl.xlabel(x_ax)
pl.ylabel(y_ax_par)  # parallel geometry, matching the title and filename
pl.title(title('6', '5', 'parallel'))
pl.legend(loc='best')
pl.axis([0.0, 500, 1e1, 1e3])
pl.minorticks_on()
pl.ticklabel_format(axis='both')
pl.grid(which='both')
pl.tick_params(which='both', labelright=True)
pl.savefig(svfig('65pk', '65', 'parallel'))
pl.show()
def mlp(l_args, s_ticker, df_stock): parser = argparse.ArgumentParser(prog="mlp", description="""Multilayer Perceptron. """) parser.add_argument( "-d", "--days", action="store", dest="n_days", type=check_positive, default=5, help="prediction days.", ) parser.add_argument( "-i", "--input", action="store", dest="n_inputs", type=check_positive, default=40, help="number of days to use for prediction.", ) parser.add_argument( "-e", "--epochs", action="store", dest="n_epochs", type=check_positive, default=200, help="number of training epochs.", ) parser.add_argument( "-j", "--jumps", action="store", dest="n_jumps", type=check_positive, default=1, help="number of jumps in training data.", ) parser.add_argument( "-p", "--pp", action="store", dest="s_preprocessing", default="normalization", choices=["normalization", "standardization", "none"], help="pre-processing data.", ) parser.add_argument( "-o", "--optimizer", action="store", dest="s_optimizer", default="adam", choices=[ "adam", "adagrad", "adadelta", "adamax", "ftrl", "nadam", "optimizer", "rmsprop", "sgd", ], help="optimization technique.", ) parser.add_argument( "-l", "--loss", action="store", dest="s_loss", default="mae", choices=["mae", "mape", "mse", "msle"], help="loss function.", ) try: ns_parser = parse_known_args_and_warn(parser, l_args) # Pre-process data if ns_parser.s_preprocessing == "standardization": scaler = StandardScaler() stock_train_data = scaler.fit_transform( np.array(df_stock["5. adjusted close"].values.reshape(-1, 1))) elif ns_parser.s_preprocessing == "normalization": scaler = MinMaxScaler() stock_train_data = scaler.fit_transform( np.array(df_stock["5. adjusted close"].values.reshape(-1, 1))) else: # No pre-processing stock_train_data = np.array( df_stock["5. adjusted close"].values.reshape(-1, 1)) # Split training data for the neural network stock_x, stock_y = splitTrain.split_train( stock_train_data, ns_parser.n_inputs, ns_parser.n_days, numJumps=ns_parser.n_jumps, ) stock_x = np.array(stock_x) stock_x = np.reshape(stock_x, (stock_x.shape[0], stock_x.shape[1])) stock_y = np.array(stock_y) stock_y = np.reshape(stock_y, (stock_y.shape[0], stock_y.shape[1])) # Build Neural Network model model = build_neural_network_model(cfg_nn_models.MultiLayer_Perceptron, ns_parser.n_inputs, ns_parser.n_days) model.compile(optimizer=ns_parser.s_optimizer, loss=ns_parser.s_loss) # Train our model model.fit(stock_x, stock_y, epochs=ns_parser.n_epochs, verbose=1) print("") print(model.summary()) print("") # Prediction yhat = model.predict( stock_train_data[-ns_parser.n_inputs:].reshape( 1, ns_parser.n_inputs), verbose=0, ) # Re-scale the data back if (ns_parser.s_preprocessing == "standardization") or (ns_parser.s_preprocessing == "normalization"): y_pred_test_t = scaler.inverse_transform(yhat.tolist()) else: y_pred_test_t = yhat l_pred_days = get_next_stock_market_days( last_stock_day=df_stock["5. adjusted close"].index[-1], n_next_days=ns_parser.n_days, ) df_pred = pd.Series(y_pred_test_t[0].tolist(), index=l_pred_days, name="Price") # Plotting plt.figure() plt.plot(df_stock.index, df_stock["5. adjusted close"], lw=3) plt.title(f"MLP on {s_ticker} - {ns_parser.n_days} days prediction") plt.xlim(df_stock.index[0], get_next_stock_market_days(df_pred.index[-1], 1)[-1]) plt.xlabel("Time") plt.ylabel("Share Price ($)") plt.grid(b=True, which="major", color="#666666", linestyle="-") plt.minorticks_on() plt.grid(b=True, which="minor", color="#999999", linestyle="-", alpha=0.2) plt.plot( [df_stock.index[-1], df_pred.index[0]], [df_stock["5. 
adjusted close"].values[-1], df_pred.values[0]], lw=1, c="tab:green", linestyle="--", ) plt.plot(df_pred.index, df_pred, lw=2, c="tab:green") plt.axvspan(df_stock.index[-1], df_pred.index[-1], facecolor="tab:orange", alpha=0.2) _, _, ymin, ymax = plt.axis() plt.vlines( df_stock.index[-1], ymin, ymax, colors="k", linewidth=3, linestyle="--", color="k", ) plt.ion() plt.show() # Print prediction data print_pretty_prediction(df_pred, df_stock["5. adjusted close"].values[-1]) print("") except Exception as e: print(e) print("")
def getLSF(telluric_data, alpha=1.0, continuum=True,test=False,save_path=None): """ Return a best LSF value from a telluric data. """ lsf_list = [] test_lsf = np.arange(3.0,13.0,0.1) data = copy.deepcopy(telluric_data) if continuum is True: data = nsp.continuumTelluric(data=data) data.flux **= alpha for i in test_lsf: telluric_model = nsp.convolveTelluric(i,data) if telluric_data.order == 59: telluric_model.flux **= 3 # mask hydrogen absorption feature data2 = copy.deepcopy(data) tell_mdl = copy.deepcopy(telluric_model) mask_pixel = 450 data2.wave = data2.wave[mask_pixel:] data2.flux = data2.flux[mask_pixel:] data2.noise = data2.noise[mask_pixel:] tell_mdl.wave = tell_mdl.wave[mask_pixel:] tell_mdl.flux = tell_mdl.flux[mask_pixel:] chisquare = nsp.chisquare(data2,tell_mdl) else: chisquare = nsp.chisquare(data,telluric_model) lsf_list.append([chisquare,i]) if test is True: plt.plot(telluric_model.wave,telluric_model.flux+(i-3)*10+1, 'r-',alpha=0.5) if test is True: plt.plot(data.wave,data.flux, 'k-',label='telluric data',alpha=0.5) plt.title("Test LSF",fontsize=15) plt.xlabel("Wavelength ($\AA$)",fontsize=12) plt.ylabel("Transmission + Offset",fontsize=12) plt.minorticks_on() if save_path is not None: plt.savefig(save_path+\ "/{}_O{}_lsf_data_mdl.png"\ .format(data.name, data.order)) #plt.show() plt.close() fig, ax = plt.subplots() for i in range(len(lsf_list)): ax.plot(lsf_list[i][1],lsf_list[i][0],'k.',alpha=0.5) ax.plot(min(lsf_list)[1],min(lsf_list)[0],'r.', label="best LSF {} km/s".format(min(lsf_list)[1])) ax.set_xlabel("LSF (km/s)",fontsize=12) ax.set_ylabel("$\chi^2$",fontsize=11) plt.minorticks_on() plt.legend(fontsize=10) if save_path is not None: plt.savefig(save_path+\ "/{}_O{}_lsf_chi2.png"\ .format(data.name, data.order)) #plt.show() plt.close() lsf = min(lsf_list)[1] if telluric_data.order == 61 or telluric_data.order == 62 \ or telluric_data.order == 63: #or telluric_data.order == 64: lsf = 5.5 print("The LSF is obtained from orders 60 and 65 (5.5 km/s).") return lsf
def arima(l_args, s_ticker, s_interval, df_stock):
    parser = argparse.ArgumentParser(
        prog='arima',
        description="""In statistics and econometrics, and in particular in time
        series analysis, an autoregressive integrated moving average (ARIMA)
        model is a generalization of an autoregressive moving average (ARMA)
        model. Both of these models are fitted to time series data either to
        better understand the data or to predict future points in the series
        (forecasting). ARIMA(p,d,q) where parameters p, d, and q are
        non-negative integers, p is the order (number of time lags) of the
        autoregressive model, d is the degree of differencing (the number of
        times the data have had past values subtracted), and q is the order of
        the moving-average model.""")
    parser.add_argument('-d', "--days", action="store", dest="n_days",
                        type=check_positive, default=5, help='prediction days.')
    parser.add_argument('-i', "--ic", action="store", dest="s_ic", type=str,
                        default='aic',
                        choices=['aic', 'aicc', 'bic', 'hqic', 'oob'],
                        help='information criteria.')
    parser.add_argument('-s', "--seasonal", action="store_true", default=False,
                        dest="b_seasonal", help='Use weekly seasonal data.')
    parser.add_argument('-o', "--order", action="store", dest="s_order",
                        type=str, help='arima model order (p,d,q) in format: pdq.')
    parser.add_argument('-r', "--results", action="store_true", dest="b_results",
                        default=False, help='results about ARIMA summary flag.')
    (ns_parser, l_unknown_args) = parser.parse_known_args(l_args)
    if l_unknown_args:
        print(f"The following args couldn't be interpreted: {l_unknown_args}\n")
        return

    # Machine Learning model
    if ns_parser.s_order:
        # Note: this assumes single-digit p, d and q (e.g. "510" -> (5, 1, 0)).
        t_order = tuple(int(d) for d in ns_parser.s_order)
        model = ARIMA(df_stock['5. adjusted close'].values, order=t_order).fit()
        l_predictions = model.predict(
            start=len(df_stock['5. adjusted close']) + 1,
            end=len(df_stock['5. adjusted close']) + ns_parser.n_days)
    else:
        # pmdarima expects the keyword "information_criterion" (singular).
        if ns_parser.b_seasonal:
            model = pmdarima.auto_arima(df_stock['5. adjusted close'].values,
                                        error_action='ignore', seasonal=True,
                                        m=5,
                                        information_criterion=ns_parser.s_ic)
        else:
            model = pmdarima.auto_arima(df_stock['5. adjusted close'].values,
                                        error_action='ignore', seasonal=False,
                                        information_criterion=ns_parser.s_ic)
        l_predictions = model.predict(n_periods=ns_parser.n_days)

    # Prediction data
    l_pred_days = get_next_stock_market_days(
        last_stock_day=df_stock['5. adjusted close'].index[-1],
        n_next_days=ns_parser.n_days)
    df_pred = pd.Series(l_predictions, index=l_pred_days, name='Price')

    if ns_parser.b_results:
        print(model.summary())
        print("")

    # Plotting
    plt.plot(df_stock.index, df_stock['5. adjusted close'], lw=2)
    if ns_parser.s_order:
        plt.title(f"ARIMA {str(t_order)} on {s_ticker} - {ns_parser.n_days} days prediction")
    else:
        plt.title(f"ARIMA {model.order} on {s_ticker} - {ns_parser.n_days} days prediction")
    plt.xlim(df_stock.index[0],
             get_next_stock_market_days(df_pred.index[-1], 1)[-1])
    plt.xlabel('Time')
    plt.ylabel('Share Price ($)')
    plt.grid(b=True, which='major', color='#666666', linestyle='-')
    plt.minorticks_on()
    plt.grid(b=True, which='minor', color='#999999', linestyle='-', alpha=0.2)
    plt.plot([df_stock.index[-1], df_pred.index[0]],
             [df_stock['5. adjusted close'].values[-1], df_pred.values[0]],
             lw=1, c='tab:green', linestyle='--')
    plt.plot(df_pred.index, df_pred, lw=2, c='tab:green')
    plt.axvspan(df_stock.index[-1], df_pred.index[-1],
                facecolor='tab:orange', alpha=0.2)
    xmin, xmax, ymin, ymax = plt.axis()
    plt.vlines(df_stock.index[-1], ymin, ymax, linewidth=1, linestyle='--',
               color='k')
    plt.show()

    # Print prediction data
    print("Predicted share price:")
    df_pred = df_pred.apply(lambda x: f"{x:.2f} $")
    print(df_pred.to_string())
    print("")
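The pdq parsing above assumes a single digit per component ("pdq", e.g. "510"); a small variant that also accepts a comma-separated form such as "10,1,2" (a sketch, not part of the original CLI):

def parse_order(s_order):
    parts = s_order.split(",") if "," in s_order else list(s_order)
    if len(parts) != 3:
        raise ValueError("order needs exactly three components: p, d, q")
    return tuple(int(p) for p in parts)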
def plot_scale_height_distribution(dataframe, cut=None, title=None, histcolor=plt.cm.Blues, savename=None): if type(cut) != type(None): dataframe = dataframe[cut] ## calculate and store scale heights for a given selection of stars dz = 0.1 dr1 = 0.5 dr2 = 1.0 dr3 = 2.0 dr4 = 5.0 rbins1 = np.arange(0.0, 7, dr1) rbins2 = np.arange(7, 10, dr2) rbins3 = np.arange(10, 20, dr3) rbins4 = np.arange(20, 40 + dr4, dr4) scalerads = np.concatenate((rbins1, rbins2, rbins3, rbins4)) zbins = np.arange(0, 3 + dz, dz) scaleheights1 = np.zeros(len(scalerads) - 1) scaleheights2 = np.zeros(len(scalerads) - 1) norms1 = np.zeros(len(scalerads) - 1) norms2 = np.zeros(len(scalerads) - 1) err_array = np.zeros(len(scalerads) - 1) for ii in range(len(scalerads) - 1): cut = (dataframe.R > scalerads[ii]) & (dataframe.R < scalerads[ii + 1]) err_array[ii] = (scalerads[ii + 1] - scalerads[ii]) / 2. zvals = dataframe.Z[cut] coeffs = exponential_fit_coeffs(zvals) scaleheights1[ii] = coeffs[0] scaleheights2[ii] = coeffs[2] norms1[ii] = coeffs[1] norms2[ii] = coeffs[3] for ii in range(len(scaleheights1)): if str(scaleheights1[ii]) == 'nan': scaleheights1[ii] = 0 else: scaleheights1[ii] = 1. / abs(scaleheights1[ii]) if str(scaleheights2[ii]) == 'nan': scaleheights2[ii] = 0 else: scaleheights2[ii] = 1. / abs(scaleheights2[ii]) ## plot scale height distribution dr = 0.25 dz = 0.1 radbins = np.arange(0, 40 + dr, dr) zbins = np.arange(0, 5 + dz, dz) fig = plt.figure(figsize=(12, 6)) H, xed, yed = np.histogram2d(dataframe.R, abs(dataframe.Z), bins=(radbins, zbins)) extent = [xed[0], xed[-1], yed[0], yed[-1]] ax = plt.subplot(111) im = plt.imshow(np.log10(H.T), extent=extent, origin='lower', aspect='auto', interpolation='nearest', cmap=histcolor) plt.scatter(scalerads[6:-1] + err_array[6:], scaleheights1[6:], c='r') plt.errorbar(scalerads[6:-1] + err_array[6:], scaleheights1[6:], xerr=err_array[6:], linewidth=0, ecolor='r', elinewidth=1) plt.scatter(scalerads[6:-1] + err_array[6:], scaleheights2[6:], c='#00FF40') plt.errorbar(scalerads[6:-1] + err_array[6:], scaleheights2[6:], xerr=err_array[6:], linewidth=0, ecolor='#00FF40', elinewidth=1) plt.fill_between([0, 3], [-1, -1], [3, 3], color='k', alpha=0.75) plt.minorticks_on() plt.xlabel('Radius (kpc)') plt.ylabel('Z (kpc)') plt.xlim(0, 40) plt.ylim(-0.05, 3.) cax = fig.add_axes([0.91, 0.125, 0.03, 0.775]) cbar = fig.colorbar(im, cax=cax) cbar.ax.set_ylabel('log$_{10}$(N)') ax.minorticks_on() if savename != None: plt.savefig(savename) plt.show()
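exponential_fit_coeffs() is not shown; a minimal sketch consistent with how its output is used above (coeffs[0] and coeffs[2] as inverse scale heights, coeffs[1] and coeffs[3] as normalisations), fitting a double exponential to the binned |Z| distribution. The bin grid mirrors the zbins above, and the nan fallback matches the caller's nan-to-zero handling:

import numpy as np
from scipy.optimize import curve_fit

def exponential_fit_coeffs(zvals, dz=0.1, zmax=3.0):
    edges = np.arange(0, zmax + dz, dz)
    counts, _ = np.histogram(np.abs(zvals), bins=edges)
    centers = 0.5 * (edges[:-1] + edges[1:])

    def model(z, k1, n1, k2, n2):
        # Thin + thick component: n1*exp(-k1*z) + n2*exp(-k2*z).
        return n1 * np.exp(-k1 * z) + n2 * np.exp(-k2 * z)

    try:
        coeffs, _ = curve_fit(model, centers, counts,
                              p0=[2.0, counts.max(), 0.5, counts.max() / 10.0])
    except RuntimeError:
        coeffs = np.full(4, np.nan)  # caller maps nan to a zero scale height
    return coeffs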
def plot_xy_with_subregions(dataframe, cut=None, slopes=None, title=None, savename=None, contours=False): if type(cut) != type(None): dataframe = dataframe[cut] x = dataframe.X y = dataframe.Y z = dataframe.Z if slopes == None: slopes = [-np.inf, -1, 1, np.inf] dlev = 0.75 levs = np.arange(0, 3.5 + dlev, dlev) ## Plot to confirm dh = 401 xbins = np.linspace(-100, 100, dh) ybins = np.linspace(-100, 100, dh) fig = plt.figure(figsize=(6, 6)) if title != None: plt.suptitle(title) ## contours if contours == True: H, xedges, yedges = np.histogram2d(x, y, bins=(xbins, ybins)) extent = [xedges[0], xedges[-1], yedges[0], yedges[-1]] im = plt.contour(np.log10(H.T), extent=extent, cmap=plt.cm.Reds_r, levels=levs) plt.subplot(111) plt.scatter(x, y, s=1, edgecolor='None', c='k') the_colors = [ '#00FF40', 'b', 'r', 'y', 'orange', '#00FF40', 'b', 'r', 'y', 'orange' ] for ii in range(len(slopes) - 1): slope1 = slopes[ii] slope2 = slopes[ii + 1] select = (y > slope1 * x) & (y < slope2 * x) ## sample plt.scatter(x[select], y[select], s=1, edgecolor='None', c=the_colors[ii]) plt.scatter(the_sun.X, the_sun.Y, s=50, c='y') ## crosshairs on (0,0) plt.plot([-1E5, 1E5], [0, 0], linestyle='--', color='grey') plt.plot([0, 0], [-1E5, 1E5], linestyle='--', color='grey') plt.xlim(-39, 39) ## Flip the Y-axis plt.ylim(39, -39) plt.xlabel('X (kpc)') plt.ylabel('Y (kpc)') plt.minorticks_on() if savename != None: plt.savefig(savename) plt.show()
b0 = popt0[1]  # Intercept value
yy = m0 * x + b0
m, b = Gradient_descent(x, y)  # Invoke the gradient descent defined above
Y = Line(x, m, b)  # Best-fit line from the gradient-descent parameters

# Plotting
plt.scatter(x, y, color="red", label='Real data', marker=".")  # Plot real data
plt.title("Linear Model using Gradient descent")  # Plot title
plt.xlabel('Age of guest (Years)')  # x-axis label
plt.ylabel('Fare (pounds)')  # y-axis label
plt.grid()  # Grid lines
plt.plot(x, Y, color="blue", label='Grad line')  # Best-fit line from gradient descent
plt.plot(x, yy, color="green", label='Scipy line')  # Best-fit line from scipy
plt.legend()
plt.minorticks_on()  # Adds small ticks on the main axes
"""
Your turn

The scipy fit is the most accurate way to get the line, and we can see that
the Grad and Scipy lines are similar.

Increase the iterations of the gradient descent model: what do you see happen?
Increase the learning rate of the gradient descent model: what do you see happen?
Try gradient descent on x = sib and y = parents

# BONUS: use a One Hot encoder to compare gender = x to fare = y
"""
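Gradient_descent() and Line() are referenced above but not defined in this excerpt; a minimal sketch of both, with hypothetical learning-rate and iteration defaults (the two knobs the exercise above asks you to vary):

import numpy as np

def Line(x, m, b):
    return m * x + b

def Gradient_descent(x, y, learning_rate=1e-4, iterations=10000):
    m, b = 0.0, 0.0
    n = len(x)
    for _ in range(iterations):
        y_pred = m * x + b
        # Gradients of the mean-squared-error loss with respect to m and b.
        dm = (-2.0 / n) * np.sum(x * (y - y_pred))
        db = (-2.0 / n) * np.sum(y - y_pred)
        m -= learning_rate * dm
        b -= learning_rate * db
    return m, b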
def plot_knn_f1scores(plot_label=''): # Plots F1-score for each source from the nearest neighbours found using knn_closest. Input is a list of indices. # If dim==1 knn found in 1-D. If dim==10, knn found in 10-D. (see later half of this function for details) # Choose to plot as function of 1D feature or r magnitude. # Load output from previous run: print('Loading knn indices from previous run saved on disk...') filename1d = 'knn_f1scores_1D' filename10d = 'knn_f1scores_10D' try: knn_f1scores_1d = load_obj(filename1d) knn_f1scores_10d = load_obj(filename10d) except: print( 'Failed to load knn_f1scores_*.pkl from disk - did you run "get_knn_accuracy()" yet?' ) exit() # combine list of dicts into single dictionary knn_f1scores_1d = { k: [d.get(k) for d in knn_f1scores_1d] for k in {k for d in knn_f1scores_1d for k in d} } knn_f1scores_10d = { k: [d.get(k) for d in knn_f1scores_10d] for k in {k for d in knn_f1scores_10d for k in d} } df1d = pd.DataFrame(knn_f1scores_1d) df10d = pd.DataFrame(knn_f1scores_10d) # 1D df1d_g = df1d[[ 'galaxy_xvar_mean', 'galaxy_xvar_std', 'galaxy_probs_mean', 'galaxy_probs_std', 'f1g', 'f1gerr', 'correct_source' ]].copy() df1d_q = df1d[[ 'quasar_xvar_mean', 'quasar_xvar_std', 'quasar_probs_mean', 'quasar_probs_std', 'f1q', 'f1qerr', 'correct_source' ]].copy() df1d_s = df1d[[ 'star_xvar_mean', 'star_xvar_std', 'star_probs_mean', 'star_probs_std', 'f1s', 'f1serr', 'correct_source' ]].copy() df1d_g['class'] = 'GALAXY' df1d_g.columns = [ 'feature1d_mean', 'feature1d_std', 'probs_mean', 'probs_std', 'f1', 'f1err', 'correct_source', 'class' ] df1d_q['class'] = 'QSO' df1d_q.columns = [ 'feature1d_mean', 'feature1d_std', 'probs_mean', 'probs_std', 'f1', 'f1err', 'correct_source', 'class' ] df1d_s['class'] = 'STAR' df1d_s.columns = [ 'feature1d_mean', 'feature1d_std', 'probs_mean', 'probs_std', 'f1', 'f1err', 'correct_source', 'class' ] df_all_1d = pd.concat([df1d_g, df1d_q, df1d_s], axis=0) df_all_1d['class'] = df_all_1d['class'].astype( 'category') # datashader wants categorical class df10d_g = df10d[[ 'galaxy_xvar_mean', 'galaxy_xvar_std', 'galaxy_probs_mean', 'galaxy_probs_std', 'f1g', 'f1gerr', 'correct_source' ]].copy() df10d_q = df10d[[ 'quasar_xvar_mean', 'quasar_xvar_std', 'quasar_probs_mean', 'quasar_probs_std', 'f1q', 'f1qerr', 'correct_source' ]].copy() df10d_s = df10d[[ 'star_xvar_mean', 'star_xvar_std', 'star_probs_mean', 'star_probs_std', 'f1s', 'f1serr', 'correct_source' ]].copy() df10d_g['class'] = 'GALAXY' df10d_g.columns = [ 'feature10d_mean', 'feature10d_std', 'probs_mean', 'probs_std', 'f1', 'f1err', 'correct_source', 'class' ] df10d_q['class'] = 'QSO' df10d_q.columns = [ 'feature10d_mean', 'feature10d_std', 'probs_mean', 'probs_std', 'f1', 'f1err', 'correct_source', 'class' ] df10d_s['class'] = 'STAR' df10d_s.columns = [ 'feature10d_mean', 'feature10d_std', 'probs_mean', 'probs_std', 'f1', 'f1err', 'correct_source', 'class' ] df_all_10d = pd.concat([df10d_g, df10d_q, df10d_s], axis=0) df_all_10d['class'] = df_all_10d['class'].astype( 'category') # datashader wants categorical class # Did we fit the knn in 1-D or in 10-D? # In 1-D a few thousand nearest neighbours will likely be a healthy mix of the 3 classes throughout most/all of the feature space. So you will get reliable numbers for F1 scores per class (perhaps with differring error bars). These are basically a round-about way of getting F1 scores shown in the histogram created by the function plot_histogram_matrix_f1. It is nice they agree (they most definately should). 
The mannor in which they agree is interesting - since knn effectively uses variable bin widths to get enough nearest neighbours, whilst plot_histogram_matrix_f1 uses fixed bin widths and averages within that bin. # select correct sources only? # Only plot f1-score for correct object type in question. e.g. If it's a galaxy, nearest 10000 objects will likely only be galaxies, so f1 for star and quasar will be very poor or zero because there are no True Positives in this area of 1-D feature space. In 1-D feature space the 10000 nearest neighbours were a healthy mix of all three classes so we didn't have this problem. print(df_all_1d.correct_source.value_counts()) print(df_all_10d.correct_source.value_counts()) df_all_1d = df_all_1d[df_all_1d.correct_source == 1] df_all_10d = df_all_10d[df_all_10d.correct_source == 1] # only 5000 sources are wrong, not so bad. # Create datashader pngs for each plot, since we have too much data for matplotlib to handle # 1D - 1dfeature vs f1 xmin1d = df1d.star_xvar_mean.min() - 0.1 # padd for plotting later xmax1d = df1d.star_xvar_mean.max() + 0.1 print(xmin1d, xmax1d) ymin = 0 ymax = 1.05 cvs = ds.Canvas(plot_width=1000, plot_height=600, x_range=(xmin1d, xmax1d), y_range=(ymin, ymax), x_axis_type='linear', y_axis_type='linear') agg = cvs.points(df_all_1d, 'feature1d_mean', 'f1', ds.count_cat('class')) ckey = dict(GALAXY=(101, 236, 101), QSO='hotpink', STAR='dodgerblue') img = tf.shade(agg, color_key=ckey, how='log') export_image(img, 'knn1d_1d_vs_f1', fmt='.png', background='white') # 10D - 1dfeature vs f1 xmin10d = df10d.star_xvar_mean.min() - 0.1 # padd for plotting later xmax10d = df10d.star_xvar_mean.max() + 0.1 print(xmin10d, xmax10d) ymin = 0 ymax = 1.05 cvs = ds.Canvas(plot_width=200, plot_height=120, x_range=(xmin10d, xmax10d), y_range=(ymin, ymax), x_axis_type='linear', y_axis_type='linear') agg = cvs.points(df_all_10d, 'feature10d_mean', 'f1', ds.count_cat('class')) ckey = dict(GALAXY=(101, 236, 101), QSO='hotpink', STAR='dodgerblue') img = tf.shade(agg, color_key=ckey, how='log') export_image(img, 'knn10d_1d_vs_f1', fmt='.png', background='white') # 1D - prob vs f1 xmin1d_probs = 0 # padd for plotting later xmax1d_probs = 1.05 ymin = 0 ymax = 1.05 cvs = ds.Canvas(plot_width=300, plot_height=300, x_range=(xmin1d_probs, xmax1d_probs), y_range=(ymin, ymax), x_axis_type='linear', y_axis_type='linear') agg = cvs.points(df_all_1d, 'probs_mean', 'f1', ds.count_cat('class')) ckey = dict(GALAXY=(101, 236, 101), QSO='hotpink', STAR='dodgerblue') img = tf.shade(agg, color_key=ckey, how='log') export_image(img, 'knn1d_probs_vs_f1', fmt='.png', background='white') # 10D - 1dfeature vs f1 xmin10d_probs = 0 # padd for plotting later xmax10d_probs = 1.05 ymin = 0 ymax = 1.05 cvs = ds.Canvas(plot_width=200, plot_height=200, x_range=(xmin10d_probs, xmax10d_probs), y_range=(ymin, ymax), x_axis_type='linear', y_axis_type='linear') agg = cvs.points(df_all_10d, 'probs_mean', 'f1', ds.count_cat('class')) ckey = dict(GALAXY=(101, 236, 101), QSO='hotpink', STAR='dodgerblue') img = tf.shade(agg, color_key=ckey, how='log') export_image(img, 'knn10d_probs_vs_f1', fmt='.png', background='white') # ----------------- plotting ----------------- # get datashader pngs, and plot a small sample of points over the top to guide eye with error bars. 
img_1d_1d = mpimg.imread('knn1d_1d_vs_f1.png') img_1d_probs = mpimg.imread('knn1d_probs_vs_f1.png') mpl.rcParams.update({'font.size': 10}) markeredgewidth = 0.5 mew = 0.5 elinewidth = 0.5 fig, axs = plt.subplots(1, 2, figsize=(14.5, 4)) # --- 1D --- 1d --- plt.sca(axs[0]) plt.imshow(img_1d_1d, extent=[xmin1d, xmax1d, ymin * 10, ymax * 10]) # make yaxis 10 times larger # fix ylabels after scaling the axis ylabels = axs[0].get_yticks() new_ylabels = [l / 10 for l in ylabels] # account for factor of 10 increase axs[0].set_yticklabels(new_ylabels) axs[0].xaxis.set_major_formatter(FormatStrFormatter('%.1f')) # plot sample over the top to get a feel for error bars samp = 2500 plt.errorbar(df1d_g[0::samp]['feature1d_mean'], df1d_g[0::samp]['f1'] * 10, xerr=df1d_g[0::samp]['feature1d_std'], yerr=df1d_g[0::samp]['f1err'] * 10, color=galaxy_c, elinewidth=elinewidth, markeredgewidth=mew, ls='none', label='Galaxies') plt.errorbar(df1d_q[0::samp]['feature1d_mean'], df1d_q[0::samp]['f1'] * 10, xerr=df1d_q[0::samp]['feature1d_std'], yerr=df1d_q[0::samp]['f1err'] * 10, color=quasar_c, elinewidth=elinewidth, markeredgewidth=mew, ls='none', label='Quasars') plt.errorbar(df1d_s[0::samp]['feature1d_mean'], df1d_s[0::samp]['f1'] * 10, xerr=df1d_s[0::samp]['feature1d_std'], yerr=df1d_s[0::samp]['f1err'] * 10, color=star_c, elinewidth=elinewidth, markeredgewidth=mew, ls='none', label='Stars') plt.tick_params(axis='y', which='both', right=True) plt.minorticks_on() plt.xlabel('1D feature') plt.ylabel('F1 score in 1 dimensions') #axs[1].text(0.95, 0.01, 'calculated from 10000 nearest neighbours in 10 dimensions', verticalalignment='bottom', horizontalalignment='right', transform=axs[1].transAxes, color='black', fontsize=8) plt.xlim(-7, 12.5) plt.legend(frameon=False, loc='lower right') plt.tight_layout() fig.tight_layout() # --- 1D --- probs --- plt.sca(axs[1]) xf = 2 plt.imshow(img_1d_probs, extent=[xmin1d_probs * xf, xmax1d_probs * xf, ymin, ymax]) # make xaxis larger # fix ylabels after scaling the axis #xlabels = axs[0].get_xticks() #new_xlabels = [l/xf for l in xlabels] # account for scaling axis axs[1].set_xticks(np.arange(0, 2.1, step=0.2)) axs[1].set_xticklabels(np.arange(0, 1.1, step=0.1)) #axs[0].xaxis.set_major_formatter(FormatStrFormatter('%.1f')) # doesn't work # getting some labels with 8 F****** decimal places without these two lines: labels = [item.get_text() for item in axs[1].get_xticklabels()] axs[1].set_xticklabels([str(round(float(label), 2)) for label in labels]) # plot sample over the top to get a feel for error bars df1d_g2 = df1d_g[(df1d_g.f1 < 0.85) & (df1d_g.probs_mean < 0.85)][0::3000] plt.errorbar(df1d_g2['probs_mean'] * xf, df1d_g2['f1'], xerr=df1d_g2['probs_std'] * xf, yerr=df1d_g2['f1err'], color=galaxy_c, elinewidth=elinewidth, ls='none', markeredgewidth=mew, label='Galaxies') df1d_q2 = df1d_q[(df1d_q.f1 < 0.85) & (df1d_q.probs_mean < 0.85)][0::3000] plt.errorbar(df1d_q2['probs_mean'] * xf, df1d_q2['f1'], xerr=df1d_q2['probs_std'] * xf, yerr=df1d_q2['f1err'], color=quasar_c, elinewidth=elinewidth, ls='none', markeredgewidth=mew, label='Quasars') df1d_q2 = df1d_q[(df1d_q.f1 < 0.85) & (df1d_q.probs_mean < 0.75)][ 0::800] # plot more at lower values in undersampled region plt.errorbar(df1d_q2['probs_mean'] * xf, df1d_q2['f1'], xerr=df1d_q2['probs_std'] * xf, yerr=df1d_q2['f1err'], color=quasar_c, elinewidth=elinewidth, ls='none', markeredgewidth=mew) df1d_s2 = df1d_s[(df1d_s.f1 < 0.85) & (df1d_s.probs_mean < 0.85)][0::3000] plt.errorbar(df1d_s2['probs_mean'] * xf, df1d_s2['f1'], 
xerr=df1d_s2['probs_std'] * xf, yerr=df1d_s2['f1err'], color=star_c, elinewidth=elinewidth, ls='none', markeredgewidth=mew, label='Stars') plt.tick_params(axis='y', which='both', right=True) plt.minorticks_on() plt.xlabel('Classification probability') plt.ylabel('F1 score in 1 dimension') #axs[0].text(0.95, 0.01, 'calculated from 10000 nearest neighbours in 1 dimension', verticalalignment='bottom', horizontalalignment='right', transform=axs[0].transAxes, color='black', fontsize=8) #plt.xlim(0.66,2) plt.tight_layout() #fig.subplots_adjust(wspace=0.1, hspace=0.1) # Must come after tight_layout to work! ... doesn't seem to work when using imshow :( fig.savefig('knn_plot_1D' + plot_label + '.pdf') plt.clf() # ---------------- 10-d ---------------- # ----------------- plotting ----------------- elinewidth = 0.2 mpl.rcParams.update({'font.size': 10}) # else its really small in the paper img_10d_1d = mpimg.imread('knn10d_1d_vs_f1.png') img_10d_probs = mpimg.imread('knn10d_probs_vs_f1.png') fig, axs = plt.subplots(1, 2, figsize=(14.5, 4)) xf = 2 # make x-axis twice as long as y. # --- 10D --- plt.sca(axs[0]) plt.imshow(img_10d_1d, extent=[xmin10d, xmax10d, ymin * 10, ymax * 10]) # make yaxis 10 times larger # fix ylabels after scaling the axis ylabels = axs[0].get_yticks() new_ylabels = [l / 10 for l in ylabels] # account for factor of 10 increase axs[0].set_yticklabels(new_ylabels) axs[0].xaxis.set_major_formatter(FormatStrFormatter('%.1f')) # plot sample over the top to get a feel for error bars df10d_g2 = df10d_g[df10d_g.f1 < 0.95][ 0:: 500] # only plot error bars below 0.95 because above this they are v small. plt.errorbar(df10d_g2['feature10d_mean'], df10d_g2['f1'] * 10, xerr=df10d_g2['feature10d_std'], yerr=df10d_g2['f1err'] * 10, color=galaxy_c, elinewidth=elinewidth, ls='none', markeredgewidth=mew, label='Galaxies') df10d_q2 = df10d_q[df10d_q.f1 < 0.95][0::500] plt.errorbar(df10d_q2['feature10d_mean'], df10d_q2['f1'] * 10, xerr=df10d_q2['feature10d_std'], yerr=df10d_q2['f1err'] * 10, color=quasar_c, elinewidth=elinewidth, ls='none', markeredgewidth=mew, label='Quasars') df10d_s2 = df10d_s[df10d_s.f1 < 0.95][0::500] plt.errorbar(df10d_s2['feature10d_mean'], df10d_s2['f1'] * 10, xerr=df10d_s2['feature10d_std'], yerr=df10d_s2['f1err'] * 10, color=star_c, elinewidth=elinewidth, ls='none', markeredgewidth=mew, label='Stars') plt.tick_params(axis='y', which='both', right=True) plt.minorticks_on() plt.xlabel('1D feature') plt.ylabel('F1 score in 10 dimensions') #axs[1].text(0.95, 0.01, 'calculated from 10000 nearest neighbours in 10 dimensions', verticalalignment='bottom', horizontalalignment='right', transform=axs[1].transAxes, color='black', fontsize=8) plt.xlim(-7, 12.5) plt.tight_layout() # --- 10D --- probs --- plt.sca(axs[1]) plt.imshow(img_10d_probs, extent=[xmin10d_probs * xf, xmax10d_probs * xf, ymin, ymax]) # make xaxis larger # fix ylabels after scaling the axis #xlabels = axs[1].get_xticks() #new_xlabels = [l/xf for l in xlabels] # account for scaling axis #axs[1].set_xticklabels(new_xlabels) axs[1].set_xticks(np.arange(0, 2.1, step=0.2)) axs[1].set_xticklabels(np.arange(0, 1.1, step=0.1)) #axs[0].xaxis.set_major_formatter(FormatStrFormatter('%.1f')) # doesn't work labels = [item.get_text() for item in axs[1].get_xticklabels()] axs[1].set_xticklabels([str(round(float(label), 2)) for label in labels]) # plot sample over the top to get a feel for error bars df10d_g2 = df10d_g[(df10d_g.f1 < 0.85) & ( df10d_g.probs_mean < 0.85 )][0:: 1000] # only plot error bars below 0.95 because above 
this they are v small, and overcrowd the plot. plt.errorbar(df10d_g2['probs_mean'] * xf, df10d_g2['f1'], xerr=df10d_g2['probs_std'] * xf, yerr=df10d_g2['f1err'], color=galaxy_c, elinewidth=elinewidth, ls='none', markeredgewidth=mew, label='Galaxy') df10d_q2 = df10d_q[(df10d_q.f1 < 0.85) & (df10d_q.probs_mean < 0.85)][0::1000] plt.errorbar(df10d_q2['probs_mean'] * xf, df10d_q2['f1'], xerr=df10d_q2['probs_std'] * xf, yerr=df10d_q2['f1err'], color=quasar_c, elinewidth=elinewidth, ls='none', markeredgewidth=mew, label='Quasar') df10d_s2 = df10d_s[(df10d_s.f1 < 0.85) & (df10d_s.probs_mean < 0.85)][0::1000] plt.errorbar(df10d_s2['probs_mean'] * xf, df10d_s2['f1'], xerr=df10d_s2['probs_std'] * xf, yerr=df10d_s2['f1err'], color=star_c, elinewidth=elinewidth, ls='none', markeredgewidth=mew, label='Star') plt.tick_params(axis='y', which='both', right=True) plt.minorticks_on() plt.xlabel('Classification probability') plt.ylabel('F1 score in 10 dimensions') plt.legend(frameon=False, loc='upper left') #axs[1].text(0.95, 0.01, 'calculated from 10000 nearest neighbours in 10 dimensions', verticalalignment='bottom', horizontalalignment='right', transform=axs[1].transAxes, color='black', fontsize=8) plt.tight_layout() fig.tight_layout() #plt.xlim(0.66,2) fig.savefig('knn_plot_10D' + plot_label + '.pdf')
def obv(l_args, s_ticker, s_interval, df_stock): parser = argparse.ArgumentParser( add_help=False, prog="obv", description=""" The On Balance Volume (OBV) is a cumulative total of the up and down volume. When the close is higher than the previous close, the volume is added to the running total, and when the close is lower than the previous close, the volume is subtracted from the running total. \n \n To interpret the OBV, look for the OBV to move with the price or precede price moves. If the price moves before the OBV, then it is a non-confirmed move. A series of rising peaks, or falling troughs, in the OBV indicates a strong trend. If the OBV is flat, then the market is not trending. """, ) parser.add_argument( "-o", "--offset", action="store", dest="n_offset", type=check_positive, default=0, help="offset", ) try: ns_parser = parse_known_args_and_warn(parser, l_args) if not ns_parser: return # Daily if s_interval == "1440min": df_ta = ta.obv( close=df_stock["5. adjusted close"], volume=df_stock["6. volume"], offset=ns_parser.n_offset, ).dropna() # Intraday else: df_ta = ta.obv( close=df_stock["4. close"], volume=df_stock["5. volume"], offset=ns_parser.n_offset, ).dropna() plt.figure(figsize=plot_autoscale(), dpi=PLOT_DPI) axPrice = plt.subplot(211) if s_interval == "1440min": plt.plot(df_stock.index, df_stock["5. adjusted close"].values, "k", lw=2) else: plt.plot(df_stock.index, df_stock["4. close"].values, "k", lw=2) plt.title(f"On-Balance Volume (OBV) on {s_ticker}") plt.xlim(df_stock.index[0], df_stock.index[-1]) plt.ylabel("Share Price ($)") plt.grid(b=True, which="major", color="#666666", linestyle="-") plt.minorticks_on() plt.grid(b=True, which="minor", color="#999999", linestyle="-", alpha=0.2) _ = axPrice.twinx() if s_interval == "1440min": plt.bar( df_stock.index, df_stock["6. volume"].values, color="k", alpha=0.8, width=0.3, ) else: plt.bar( df_stock.index, df_stock["5. volume"].values, color="k", alpha=0.8, width=0.3, ) plt.subplot(212) plt.plot(df_ta.index, df_ta.values, "b", lw=1) plt.xlim(df_stock.index[0], df_stock.index[-1]) plt.legend(["OBV"]) plt.xlabel("Time") plt.grid(b=True, which="major", color="#666666", linestyle="-") plt.minorticks_on() plt.grid(b=True, which="minor", color="#999999", linestyle="-", alpha=0.2) if gtff.USE_ION: plt.ion() plt.show() print("") except Exception as e: print(e) print("")
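For reference, the OBV described in the docstring reduces to a signed cumulative volume sum; a minimal pandas equivalent of the quantity ta.obv computes (a sketch, not the pandas_ta implementation):

import numpy as np
import pandas as pd

def obv_manual(close: pd.Series, volume: pd.Series) -> pd.Series:
    # +volume when the close rises, -volume when it falls, 0 when flat.
    direction = np.sign(close.diff()).fillna(0)
    return (direction * volume).cumsum()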
# Set delta for the linmix arrays, flagging which values are upper limits:
# 1 is a measured value, 0 is an upper limit. Brooke's galaxies all have
# measured B/T, the SDSS galaxies are all upper limits, and of the INT values
# 2 are measured and 3 are upper limits (set to B/T = 1).
delta = N.append(N.ones(len(brooke_mtot)),
                 N.append(N.zeros(len(bt[is_sdss])), int_delta))

# Plot a histogram of B/T ratios, just as a check (almost all, but not all,
# are upper limits)
P.figure(figsize=(6, 3))
P.hist(N.append(brooke_BT, N.array(bt[BTcol])), range=(-0.05, 1.05), bins=15,
       histtype='step', color='k')
P.xlabel(r'$[\rm{B}/\rm{T}]_r$')
P.ylabel(r'$\rm{number}$')
P.ylim(0, 20)
P.minorticks_on()
P.tight_layout()
P.savefig('bulge_to_total_r_ratio_hist_with_INT_simmons13.pdf',
          frameon=False, transparent=True)

# The x values we'll be using for all the linmix fits
# (finely sampled so the fit lines don't look jagged)
xs = N.linspace(0, 15, 500)

# Now either load the linmix parameters from a file or re-fit the data,
# depending on what was specified when the program started
if read_from_file:
    lmhr_chain = N.load('%s_haringrixfit.npy' % linmixfilebase)
    lmhr_alpha = lmhr_chain['alpha']
    lmhr_beta = lmhr_chain['beta']
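# A minimal sketch of the re-fit branch, assuming the python linmix port
# (github.com/jmeyers314/linmix); x, y, xerr and yerr are hypothetical
# stand-ins for the arrays assembled above, while delta is the flag array.
import linmix

lm = linmix.LinMix(x, y, xsig=xerr, ysig=yerr, delta=delta, K=2)
lm.run_mcmc(silent=True)
# lm.chain is a structured array, so its 'alpha' and 'beta' fields match
# what N.load reads back in the read_from_file branch above
N.save('%s_haringrixfit.npy' % linmixfilebase, lm.chain)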
def collectData(pandaID):
    logFiles = []
    logFiles.extend(
        Filestable4.objects.filter(pandaid=pandaID, type='log').values())
    if len(logFiles) == 0:
        logFiles.extend(
            FilestableArch.objects.filter(pandaid=pandaID, type='log').values())
    if not len(logFiles) == 1:
        return HttpResponse('Log files for pandaid=%s not found' % pandaID)
    logfile = logFiles[0]
    guid = logfile['guid']
    lfn = logfile['lfn']
    scope = logfile['scope']
    http = urllib3.PoolManager()
    resp = http.request('GET',
                        filebrowserURL,
                        fields={
                            'guid': guid,
                            'lfn': lfn,
                            'scope': scope,
                            'json': 1
                        })
    if resp and len(resp.data) > 0:
        try:
            data = json.loads(resp.data)
            HOSTNAME = data['HOSTNAME']
            tardir = data['tardir']
            MEDIA_URL = data['MEDIA_URL']
            dirprefix = data['dirprefix']
            files = data['files']
            files = [
                f for f in files if 'memory_monitor_output.txt' in f['name']
            ]
        except Exception:
            return -2
    else:
        return -2

    urlBase = "http://" + HOSTNAME + "/" + MEDIA_URL + "/" + dirprefix + "/" + tardir

    # Fetch every memory_monitor_output.txt and keep the first 9 columns
    dfl = []
    pd.set_option('display.max_columns', 1000)
    for f in files:
        url = urlBase + f['dirname'] + "/" + f['name']
        resp = http.request('GET', url)
        TESTDATA = StringIO.StringIO(resp.data)
        dfl.append(pd.read_csv(TESTDATA, sep="\t").iloc[:, range(9)])

    if len(dfl) == 0:
        return HttpResponse('')

    df = pd.concat(dfl)
    df.columns = [
        'Time', 'VMEM', 'PSS', 'RSS', 'Swap', 'rchar', 'wchar', 'rbytes',
        'wbytes'
    ]
    df = df.sort_values(by='Time')
    # Express time relative to the first sample, memory in GB and IO in MB
    tstart = df['Time'].min()
    df['Time'] = df['Time'] - tstart
    for col in ['PSS', 'RSS', 'VMEM', 'Swap',
                'rchar', 'wchar', 'rbytes', 'wbytes']:
        df[col] = df[col] / 1024.0 / 1024.0

    # Make plot for memory consumption; one axes holds all four curves
    f1 = plt.figure(figsize=(15, 10))
    ax1 = f1.add_subplot(111)
    for col in ['PSS', 'RSS', 'Swap', 'VMEM']:
        ax1.plot(df['Time'], df[col], label=col)
    ax1.legend(loc="upper right")
    plt.title("Memory consumption, job " + str(pandaID))
    plt.xlabel("time (s)")
    plt.ylabel("memory usage (GB)")
    plt.ylim(ymin=0)
    plt.xlim(xmin=0)
    plt.grid()
    plt.minorticks_on()
    # Tick every GB on the y axis
    plt.yticks(np.arange(0, plt.ylim()[1], 1))
    plot1img = StringIO.StringIO()
    plt.savefig(plot1img, format='png')
    plot1img.seek(0)

    # Make plot for IO
    f1 = plt.figure(figsize=(15, 10))
    ax1 = f1.add_subplot(111)
    for col in ['rchar', 'wchar', 'rbytes', 'wbytes']:
        ax1.plot(df['Time'], df[col], label=col)
    ax1.legend(loc="upper right")
    plt.title("IO, job " + str(pandaID))
    plt.xlabel("time (s)")
    plt.ylabel("IO (MB)")
    plt.grid()
    plt.ylim(ymin=0)
    plt.xlim(xmin=0)
    plot2img = StringIO.StringIO()
    plt.savefig(plot2img, format='png')
    plot2img.seek(0)

    # Make plot for IO rate: first differences divided by the elapsed time,
    # with the first sample set to 0
    dt = df['Time'].diff()
    for col in ['rchar', 'wchar', 'rbytes', 'wbytes']:
        df['d' + col] = (df[col].diff() / dt).fillna(0)

    f1 = plt.figure(figsize=(15, 10))
    ax1 = f1.add_subplot(111)
    for col in ['drchar', 'dwchar', 'drbytes', 'dwbytes']:
        ax1.plot(df['Time'], df[col], label=col[1:])
    ax1.legend(loc="upper right")
    plt.title("IO rate, job " + str(pandaID))
    plt.xlabel("time (s)")
    plt.ylabel("IO rate (MB/s)")
    plt.grid()
    plt.ylim(ymin=0)
    plt.xlim(xmin=0)
    plot3img = StringIO.StringIO()
    plt.savefig(plot3img, format='png')
    plot3img.seek(0)

    # Stack the three plot images vertically into a single PNG
    images = [Image.open(img) for img in (plot1img, plot2img, plot3img)]
    widths, heights = zip(*(i.size for i in images))
    max_width = max(widths)
    total_height = sum(heights)
    new_im = Image.new('RGB', (max_width, total_height))
    y_offset = 0
    for im in images:
        new_im.paste(im, (0, y_offset))
        y_offset += im.size[1]
    finPlotData = StringIO.StringIO()
    new_im.save(finPlotData, format='png')
    finPlotData.seek(0)
    return HttpResponse(finPlotData.getvalue(), content_type="image/png")
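# The PIL stitching above could instead be avoided by drawing the three
# panels into one figure; a minimal sketch, assuming the same df columns:
fig, axes = plt.subplots(3, 1, figsize=(15, 30))
panels = [
    (axes[0], ['PSS', 'RSS', 'Swap', 'VMEM'], 'memory usage (GB)'),
    (axes[1], ['rchar', 'wchar', 'rbytes', 'wbytes'], 'IO (MB)'),
    (axes[2], ['drchar', 'dwchar', 'drbytes', 'dwbytes'], 'IO rate (MB/s)'),
]
for ax, cols, ylab in panels:
    for col in cols:
        ax.plot(df['Time'], df[col], label=col)
    ax.legend(loc='upper right')
    ax.set_xlabel('time (s)')
    ax.set_ylabel(ylab)
    ax.grid()
    ax.minorticks_on()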
    print('\n----------------------------')
    print('spacing: ', space)
    print('time: ', opdays / opfact)  # real time: tm(space)
    print('F_lim: ', Flimval)
    print('difference: ', -(Fset - Flimval) / Fset)
    spacelst.append(space)
    timelst.append(opdays / opfact)
    Flimlst.append(7 * Flimval)
    space = space - dsp
    # print(space, Fset, Flimval, -(Fset - Flimval) / Fset)

c1 = (0, 102 / 256, 204 / 256)
c2 = (1, 0, 0)
plt.axvline(0, color=(0, 0, 0), linewidth=0.8)
plt.axhline(0, color=(0, 0, 0), linewidth=0.8)
plt.minorticks_on()  # enable minor ticks
plt.grid(which='major', linestyle='-', linewidth=0.3,
         color='black')  # customise major grid
plt.grid(which='minor', linestyle=':', linewidth=0.3,
         color='grey')  # customise minor grid
plt.axvline(200, color=(0, 153 / 256, 44 / 256), linestyle='--')
plt.plot(spacelst, timelst, color=c1)
# plt.xscale('log')
plt.xlabel('Mesh Spacing [m]')
plt.ylabel('Operational Time [Days]')
plt.show()
def imss(fdic,
         key,
         cut=None,
         ax=None,
         extent=None,
         cbar=None,
         smth=None,
         nolabels=None,
         lblsz='small',
         **kwargs):
    """ A wrapper function for imshow to do most tedious stuff for my
        simulations """
    old_ax = plt.gca()  # Get Current Axis
    if ax is None:
        ax = old_ax
    else:
        plt.sca(ax)  # Set Current Axis

    # Accept either a key into fdic or an array passed in directly
    if isinstance(key, str):
        plt_val = fdic[key]
    else:
        plt_val = key

    # Optional Gaussian smoothing (gf is scipy.ndimage's gaussian_filter)
    if smth is not None:
        plt_val = gf(plt_val, sigma=smth)

    if cut is None:
        if len(plt_val.shape) == 2:
            IDX = np.s_[:, :]
        else:
            IDX = np.s_[:, :, 0]
    else:
        if len(plt_val.shape) == 2:
            IDX = compute2didx([fdic['xx'], fdic['yy']], cut)
        else:
            IDX = compute2didx([fdic['xx'], fdic['yy'], fdic['zz']], cut)

    # Use the dict values of xx and yy to set extent
    ext = [fdic['xx'][IDX[0]][0],
           fdic['xx'][IDX[0]][-1],
           fdic['yy'][IDX[1]][0],
           fdic['yy'][IDX[1]][-1]]

    cmap = kwargs.pop('cmap', 'PuOr')  # I just love this color map

    im = ax.imshow(plt_val[IDX].T,
                   origin='lower',
                   extent=ext,
                   cmap=cmap,
                   aspect='equal',
                   **kwargs)

    if extent is not None:
        ax.set_xlim(extent[:2])
        ax.set_ylim(extent[2:])
        ax.autoscale(False)

    if nolabels is None:
        ax.set_xlabel(r'$X (d_i)$', size=lblsz)
        ax.set_ylabel(r'$Y (d_i)$', size=lblsz)
    ax.xaxis.set_tick_params(which='both', labelsize=lblsz)
    ax.yaxis.set_tick_params(which='both', labelsize=lblsz)
    # minorLocator = AutoMinorLocator()  # Note: each axis would need its own
    # ax.xaxis.set_minor_locator(minorLocator)  # locator so the minor x ticks
    # ax.yaxis.set_minor_locator(minorLocator)  # are not the same as the y ticks
    plt.minorticks_on()
    plt.sca(old_ax)

    # Code to implement for a cbar
    if cbar:
        divider = make_axes_locatable(ax)
        cax = divider.append_axes("right", "3%", pad="1.5%")
        plt.colorbar(im, cax=cax)
        cax.xaxis.set_tick_params(which='both', labelsize=lblsz)
        cax.yaxis.set_tick_params(which='both', labelsize=lblsz)
        plt.draw()
        return im, cax
    else:
        return im
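# Hypothetical usage sketch: fdic is assumed to be a dict-like holding 2-D
# field arrays plus 'xx'/'yy' coordinate vectors, as imss expects; the field
# name 'bz' is made up. Extra kwargs such as vmin/vmax pass through to imshow.
fig, ax = plt.subplots()
im, cax = imss(fdic, 'bz', ax=ax, cbar=True, smth=2, vmin=-1, vmax=1)
ax.set_title(r'$B_z$')
fig.savefig('bz_map.png', dpi=300)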
def plot_raman(yscale="linear", figname="Raman.png", relative=False,
               w_min=None, w_max=None, ramanname=None):
    """
    Plots a given Raman spectrum

    Input:
        yscale: Linear or logarithmic yscale
        figname: Name of the generated figure
        relative: Scale to the highest peak
        w_min, w_max: The plotting range wrt the Raman shift
        ramanname: Suffix used for the file containing the Raman spectrum

    Output:
        ramanname: image containing the Raman spectrum.
    """
    import matplotlib
    matplotlib.use('Agg')  # FIXME: Evil, none of this function's business
    import matplotlib.pyplot as plt
    import matplotlib.colors as colors
    import matplotlib.cm as cmx
    import numpy as np
    from scipy import signal
    from ase.parallel import world

    # Plotting function
    if world.rank == 0:
        legend = isinstance(ramanname, (list, tuple))
        if ramanname is None:
            RI_name = ["RI.npy"]
        elif isinstance(ramanname, (list, tuple)):
            RI_name = ["RI_{}.npy".format(name) for name in ramanname]
        else:
            RI_name = ["RI_{}.npy".format(ramanname)]

        ylabel = "Intensity (arb. units)"
        cmap = plt.get_cmap('inferno')
        cNorm = colors.Normalize(vmin=0, vmax=len(RI_name))
        scalarMap = cmx.ScalarMappable(norm=cNorm, cmap=cmap)
        peaks = None
        for i, name in enumerate(RI_name):
            RI = np.real(np.load(name))
            if w_min is None:
                w_min = np.min(RI[0])
            if w_max is None:
                w_max = np.max(RI[0])
            r = RI[1][np.logical_and(RI[0] >= w_min, RI[0] <= w_max)]
            w = RI[0][np.logical_and(RI[0] >= w_min, RI[0] <= w_max)]
            cval = scalarMap.to_rgba(i)
            if relative:
                ylabel = "I/I_max"
                r = r / np.max(r)
            # Locate the peaks of the first spectrum only
            if peaks is None:
                peaks = signal.find_peaks(
                    r[np.logical_and(w >= w_min, w <= w_max)])[0]
                locations = np.take(
                    w[np.logical_and(w >= w_min, w <= w_max)], peaks)
                intensities = np.take(
                    r[np.logical_and(w >= w_min, w <= w_max)], peaks)
            if legend:
                plt.plot(w, r, color=cval, label=ramanname[i])
            else:
                plt.plot(w, r, color=cval)
        # Mark every peak stronger than 5% of the maximum with a dashed line
        for j, loc in enumerate(locations):
            if intensities[j] / np.max(intensities) > 0.05:
                plt.axvline(x=loc, color="grey", linestyle="--")
        # FIXME: usage of pyplot API
        plt.yscale(yscale)
        plt.minorticks_on()
        if legend:
            plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
        plt.title("Raman intensity")
        plt.xlabel("Raman shift (cm$^{-1}$)")
        plt.ylabel(ylabel)
        if not relative:
            plt.yticks([])
        plt.savefig(figname, dpi=300)
        plt.clf()
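# Hypothetical call, assuming an earlier Raman calculation step wrote
# RI_MoS2.npy next to the script (the suffix 'MoS2' is made up):
plot_raman(yscale='log', relative=True, w_min=100.0, w_max=700.0,
           ramanname='MoS2', figname='Raman_MoS2.png')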
def ad(l_args, s_ticker, s_interval, df_stock):
    parser = argparse.ArgumentParser(
        add_help=False,
        prog="ad",
        description="""
            The Accumulation/Distribution Line is similar to the On Balance Volume (OBV),
            which sums the volume times +1/-1 based on whether the close is higher than
            the previous close. The Accumulation/Distribution indicator, however,
            multiplies the volume by the close location value (CLV). The CLV is based on
            where the close falls within the bar's range and varies between +1 and -1. \n \n
            The Accumulation/Distribution Line is interpreted by looking for a divergence
            in the direction of the indicator relative to price. If the
            Accumulation/Distribution Line is trending upward, it indicates that the
            price may follow. Also, if the Accumulation/Distribution Line becomes flat
            while the price is still rising (or falling), then it signals an impending
            flattening of the price.
        """,
    )

    parser.add_argument(
        "-o",
        "--offset",
        action="store",
        dest="n_offset",
        type=check_positive,
        default=0,
        help="offset",
    )
    parser.add_argument(
        "--open",
        action="store_true",
        default=False,
        dest="b_use_open",
        help="uses open value of stock",
    )

    try:
        ns_parser = parse_known_args_and_warn(parser, l_args)
        if not ns_parser:
            return

        # Column names differ between daily and intraday data
        if s_interval == "1440min":
            close_col, volume_col = "5. adjusted close", "6. volume"
        else:
            close_col, volume_col = "4. close", "5. volume"

        ta_kwargs = dict(
            high=df_stock["2. high"],
            low=df_stock["3. low"],
            close=df_stock[close_col],
            volume=df_stock[volume_col],
            offset=ns_parser.n_offset,
        )
        # Optionally pass the open, which switches to the open-based CLV
        if ns_parser.b_use_open:
            ta_kwargs["open_"] = df_stock["1. open"]
        df_ta = ta.ad(**ta_kwargs).dropna()

        plt.figure(figsize=plot_autoscale(), dpi=PLOT_DPI)
        # Top panel: price with volume bars on a twin axis
        axPrice = plt.subplot(211)
        plt.plot(df_stock.index, df_stock[close_col].values, "k", lw=2)
        plt.title(f"Accumulation/Distribution Line (AD) on {s_ticker}")
        plt.xlim(df_stock.index[0], df_stock.index[-1])
        plt.ylabel("Share Price ($)")
        plt.grid(b=True, which="major", color="#666666", linestyle="-")
        plt.minorticks_on()
        plt.grid(b=True, which="minor", color="#999999", linestyle="-", alpha=0.2)
        _ = axPrice.twinx()
        plt.bar(
            df_stock.index,
            df_stock[volume_col].values,
            color="k",
            alpha=0.8,
            width=0.3,
        )
        # Bottom panel: the A/D Line itself
        plt.subplot(212)
        plt.plot(df_ta.index, df_ta.values, "b", lw=1)
        plt.xlim(df_stock.index[0], df_stock.index[-1])
        plt.axhline(0, linewidth=2, color="k", ls="--")
        plt.legend(["Accumulation/Distribution Line"])
        plt.xlabel("Time")
        plt.grid(b=True, which="major", color="#666666", linestyle="-")
        plt.minorticks_on()
        plt.grid(b=True, which="minor", color="#999999", linestyle="-", alpha=0.2)

        if gtff.USE_ION:
            plt.ion()

        plt.show()
        print("")

    except Exception as e:
        print(e)
        print("")
        return
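# A minimal A/D Line sketch under the CLV definition in the help text above,
# independent of pandas_ta (the series names here are hypothetical):
import pandas as pd

def ad_manual(high: pd.Series, low: pd.Series,
              close: pd.Series, volume: pd.Series) -> pd.Series:
    # Close location value: +1 when the close is at the high, -1 at the low
    clv = ((close - low) - (high - close)) / (high - low)
    clv = clv.fillna(0)  # bars with high == low give 0/0; count them as zero
    # The A/D Line is the cumulative total of CLV-weighted volume
    return (clv * volume).cumsum()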