def plotDC(vp, block, trial):
    ''' plot gaze during drift correction'''
    from Preprocess import readEyelink
    plt.interactive(False)
    # vp=1
    # from readETData import readEyelink
    # for b in range(4,23):
    #     print 'block ', b
    #     data=readEyelink(vp,b)
    #     for i in range(0,len(data)):
    b = block; i = trial
    data = readEyelink(vp, b)
    d = data[i]
    gg = d.getGaze(phase=3)
    plt.plot(gg[:, 0], gg[:, 1], 'g--')
    plt.plot(gg[:, 0], gg[:, 2], 'r--')
    plt.plot(gg[:, 0], gg[:, 4], 'b--')
    plt.plot(gg[:, 0], gg[:, 5], 'k--')
    d.extractBasicEvents()
    d.driftCorrection(jump=manualDC(vp, b, i))
    gg = d.getGaze(phase=3)
    plt.plot(gg[:, 0], gg[:, 1], 'g')
    plt.plot(gg[:, 0], gg[:, 2], 'r')
    plt.plot(gg[:, 0], gg[:, 4], 'b')
    plt.plot(gg[:, 0], gg[:, 5], 'k')
    plt.plot([gg[0, 0], gg[-1, 0]], [0, 0], 'k')
    plt.plot(d.dcfix, [-0.45, -0.45], 'k', lw=2)
    plt.grid()
    plt.ylim([-0.5, 0.5])
    plt.legend(['left x', 'left y', 'right x', 'right y'])
    plt.savefig(PATH + 'dc' + os.path.sep + 'vp%03db%02dtr%02d' % (vp, b, i))
    plt.cla()
def plot_uncertainty_bounds_s(self, multiplier=200, *args, **kwargs):
    '''
    Plots complex uncertainty bounds on the smith chart.

    This function plots the complex uncertainty of a NetworkSet as
    circles on the smith chart. At each frequency a circle with radii
    proportional to the complex standard deviation of the set at that
    frequency is drawn. Because the `markersize` argument is given in
    points rather than data units, the radii can be scaled by the input
    argument `multiplier`.

    default kwargs are
        {
        'marker':'o',
        'color':'b',
        'mew':0,
        'ls':'',
        'alpha':.1,
        'label':None,
        }

    Parameters
    -------------
    multiplier : float
        controls the circle sizes, by multiples of the standard deviation.

    '''
    default_kwargs = {
        'marker': 'o',
        'color': 'b',
        'mew': 0,
        'ls': '',
        'alpha': .1,
        'label': None,
    }
    default_kwargs.update(**kwargs)

    if plb.isinteractive():
        was_interactive = True
        plb.interactive(0)
    else:
        was_interactive = False

    [self.mean_s[k].plot_s_smith(*args, ms=self.std_s[k].s_mag * multiplier,
                                 **default_kwargs) for k in range(len(self[0]))]

    if was_interactive:
        plb.interactive(1)
        plb.draw()
        plb.show()
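# A minimal usage sketch for the method above (an illustration, not from the
# original source). It assumes the method lives on scikit-rf's NetworkSet; the
# 'measurements/' directory and the 'dut' file pattern are placeholders.
import skrf as rf
import pylab as plb

# build a NetworkSet from repeated touchstone measurements of the same device
ntwk_dict = rf.read_all('measurements/', contains='dut')
ns = rf.NetworkSet(list(ntwk_dict.values()))
ns.plot_uncertainty_bounds_s(multiplier=200, color='b', alpha=.1)
plb.show()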
def main():
    import pylab
    #mydirectory=r'D:\BiFeO3film\Mar27_2011'
    mydirectory = r'/net/charlotte/var/ftp/pub/ncnrdata/bt9/201102/ylem/BiFeO3film/Mar27_2011'
    myend = 'bt9'
    pylab.interactive(True)
    for dataset in sys.argv[1:]:
        data = read_data(mydirectory, "mesh" + dataset, myend)
        plot(data)
        _ = raw_input("> ")
def create_plot(self, figsize=(16, 9)):
    pylab.interactive(True)
    pylab.figure(figsize=figsize)
    pylab.title(self.description)
    pylab.xlabel('time')
    pylab.ylabel('points')
    pylab.grid(True)
    self.figure = pylab.gcf()
    self.ax = pylab.gca()
def optimizeAR1Model(obs_spectrum, freqs, f_nyquist, N, maxfreq=10, init_est=None):
    interactive(True)
    # plot(freqs, obs_spectrum)
    if init_est is None:
        init_est = (0.0, 0.5)
    kd, cov, infodict, mesg, ier = optimize.leastsq(
        residuals, init_est,
        args=(obs_spectrum, freqs, f_nyquist, N, maxfreq),
        epsfcn=0.001, ftol=1e-16, gtol=1e-16, xtol=1e-16,
        maxfev=10000, full_output=True)
    return kd, mesg, ier
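# The `residuals` callable passed to leastsq above is not shown in this snippet.
# Below is a hedged sketch of what it might look like, assuming the usual AR(1)
# ("red noise") power spectrum parameterised by a lag-one autocorrelation `k`
# and a scale factor `c`; the exact model and normalisation are assumptions.
import numpy as np

def residuals(params, obs_spectrum, freqs, f_nyquist, N, maxfreq=10):
    """Difference between an assumed AR(1) model spectrum and the observed one.

    params = (k, c): lag-one autocorrelation and amplitude.  `N` (series
    length) is accepted to match the args tuple but unused in this sketch.
    Only frequencies below `maxfreq` contribute to the fit.
    """
    k, c = params
    model = c * (1.0 - k ** 2) / (1.0 + k ** 2 - 2.0 * k * np.cos(np.pi * freqs / f_nyquist))
    resid = obs_spectrum - model
    return resid[freqs < maxfreq]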
def savefig(fname, figsize, fig=None, **kwargs):
    """ force saving figure with a given size, useful when using tiling wm;
        if fname is a list, it saves multiple files, for example
        ["todel.pdf", "todel.png"] """
    if isinstance(fname, str):
        fname = (fname,)
    if fig is None:
        fig = plt.gcf()
    old_bkg = plt.get_backend()
    old_inter = plt.isinteractive()
    old_height = fig.get_figheight()
    old_width = fig.get_figwidth()
    try:
        plt.switch_backend("cairo")
        fig.set_figwidth(figsize[0])
        fig.set_figheight(figsize[1])
        [fig.savefig(f, **kwargs) for f in fname]
    finally:
        # restore original size, backend and interactive state
        fig.set_figwidth(old_width)
        fig.set_figheight(old_height)
        plt.switch_backend(old_bkg)
        plt.interactive(old_inter)
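# Example use of the helper above: render the current figure at a fixed 8x4 inch
# size to both files named in the docstring. Requires the cairo backend the
# helper switches to (pycairo installed); the file names are just examples.
import matplotlib.pyplot as plt

plt.plot([0, 1, 2], [0, 1, 4])
savefig(["todel.pdf", "todel.png"], figsize=(8, 4))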
def main():
    import pylab as pl
    from .pncparse import pncparse
    ifiles, options = pncparse(has_ofile=True, plot_options=True, interactive=False)
    if len(ifiles) != 1:
        raise IOError('pncview can operate on only 1 file; user requested %d' % len(ifiles))
    ifile, = ifiles
    pl.interactive(True)
    for method_vars in options.plotcommands:
        pieces = method_vars.split(',')
        plotargs = [p for p in pieces if '=' not in p]
        plotkwds = [p for p in pieces if '=' in p]
        method, = plotargs[:1]
        vars = plotargs[1:]
        plotoptions = eval('OptionDict(outpath="%s",%s)' % (options.outpath, ','.join(plotkwds)))
        print(plotoptions.logscale)
        plotwithopts(ifile, method, vars, plotoptions)
    pl.interactive(False)
    if len(options.plotcommands) == 0:
        pncview(ifile, options)
def plotsweep(d, exp, vals=None, ivar=None, globals=None, labelprefix='', plotter=None):
    interact = pyl.isinteractive()
    if interact:
        pyl.interactive(False)
    if not vals:
        vals = d.sweepvals
    for v in vals:
        s = d.getSweep(v)
        if isinstance(exp, str):
            if globals:
                y = eval(exp, globals, locals())
            else:
                y = eval(exp)
        else:
            y = exp(s)
        if plotter:
            p = plotter
        else:
            p = pyl.plot
        if ivar:
            # evaluate the independent-variable expression (ivar), not exp
            if isinstance(ivar, str):
                if globals:
                    x = eval(ivar, globals, locals())
                else:
                    x = eval(ivar)
            else:
                x = ivar(s)
        else:
            x = s.x
        p(x, y, label='%s%s=%g' % (labelprefix, s.sweepvar, s.sweepval))
    pyl.interactive(interact)
    pyl.legend(loc='best')
def plot_weights(model_info, pairs):
    # type: (ModelInfo, List[Tuple[np.ndarray, np.ndarray]]) -> None
    """
    Plot the weights returned by :func:`get_weights`.

    *model_info* defines the parameters being plotted; *pairs* is the list of
    (value, weight) arrays returned by :func:`get_weights`, one per parameter.
    """
    import pylab
    if any(len(values) > 1 for values, weights in pairs):
        labels = [p.name for p in model_info.parameters.call_parameters]
        pylab.interactive(True)
        pylab.figure()
        for (v, w), s in zip(pairs, labels):
            if len(v) > 1:
                #print("weights for", s, v, w)
                pylab.plot(v, w, '-o', label=s)
        pylab.grid(True)
        pylab.legend()
def main():
    import sys
    diff = "relative"
    xrange = "log"
    options = [v for v in sys.argv[1:] if v.startswith('-')]
    for opt in options:
        if opt == '-f':
            diff = "none"
        elif opt == '-r':
            diff = "relative"
        elif opt == '-a':
            diff = "absolute"
        elif opt.startswith('-x'):
            xrange = opt[2:]
        else:
            usage()
    names = [v for v in sys.argv[1:] if not v.startswith('-')]
    if not names:
        usage()
    if names[0] == "all":
        cutoff = names[1] if len(names) > 1 else ""
        names = list(sorted(ALL_FUNCTIONS))
        names = [k for k in names if k >= cutoff]
    if any(k not in FUNCTIONS for k in names):
        usage()
    multiple = len(names) > 1
    pylab.interactive(multiple)
    for k in names:
        pylab.clf()
        comparator = FUNCTIONS[k]
        comparator.run(xrange=xrange, diff=diff)
        if multiple:
            raw_input()
    if not multiple:
        pylab.show()
#!/usr/bin/env python
'''
This is a script allowing the user to specify the path to a morphology file,
and determine the rotation angles that will rotate the apical dendrite along
the vertical z-axis. If desired, it will create a .rot-file that LFPy will
automatically use to set the default rotation alongside the morphology.
'''
# import some stuff
import pylab as pl
import LFPy
import os

# plot will pop up by itself
pl.interactive(1)

'''
Define some functions for plotting
'''
def plot_linepiece(ax, cell, i, color):
    ax.plot([cell.xstart[i], cell.xend[i]],
            [cell.ystart[i], cell.yend[i]],
            [cell.zstart[i], cell.zend[i]],
            color=color, lw=cell.diam[i])

def plot_morpho_indices(cell, new_fig=True):
    from mpl_toolkits.mplot3d import Axes3D
    if new_fig:
        fig = pl.figure(figsize=[10, 10])
""" i_min, i_max = np.where(mtx.mean(1))[0][[0,-1]] P.figure(figsize=(14.5,8)) P.stem(np.arange(i_max+1-i_min),mtx[i_min:i_max+1,:].sum(1)) ttl = 'Note Frequency' if tstr: ttl+=': '+tstr P.title(ttl,fontsize=16) t=P.xticks(np.arange(0,i_max+1-i_min,3),pc_labels[i_min:i_max+1:3],fontsize=14) P.xlabel('Pitch Class', fontsize=14) P.ylabel('Frequency', fontsize=14) ax = P.axis() P.axis(xmin=-0.5) P.grid() if __name__ == "__main__": P.interactive(True) a = np.loadtxt('01.ascii') P.figure() # Plot piano roll: MIDI pitch by beats P.subplot(211) plot_mtx(a, cmap=P.cm.gray_r, cbar=False) P.axis('tight') P.title('WTC 1 "Prelude in C": Piano Roll') # Plot dissonance by (integrated) beats P.subplot(212) win_len=8 # Number of beats to integrate, non-overlapping a = win_mtx(a, win_len) d = dissonance_fun(a) P.plot(np.arange(len(d))*win_len, d,'r',linewidth=1) P.axis('tight')
rank=4, levels=6) N = np.sqrt(u**2 + v**2) pl.figure() pl.imshow(N) pl.title('Norm of OPTIC to RADAR registration') pl.colorbar() Ioptique_resampled = wrapData(Ioptique, u, v) C = np.dstack((Ioptique / 255, Iradar / 255, Ioptique / 255)) pl.figure() pl.imshow(C) pl.title('Imfuse of RADAR and OPTIC') D = np.dstack( (Ioptique_resampled / 255, Iradar / 255, Ioptique_resampled / 255)) pl.figure() pl.imshow(D) pl.title('Imfuse of RADAR and OPTIC after coregistration') print("Fin recalage optique/Radar \n\n") if __name__ == '__main__': demo() pl.show() else: pl.interactive(True) demo()
def error_report(clf, X, y, y_scores=None, ind=None, spec_func=None):
    """Generate error report as a multi page pdf.

    This function plots the ROC curve of ``clf`` and spectrograms for the
    top ``k`` false negatives, false positives, true positives, and true
    negatives.

    Parameters
    ----------
    clf : BaseEstimator
        A trained classifier
    X : ndarray
        A data array, used to generate the spectrograms (using ``spec_func``)
        and optionally ``y_scores``.
    """
    if y_scores is None:
        if hasattr(clf, 'decision_function'):
            y_scores = clf.decision_function(X)
        else:
            y_scores = clf.predict_proba(X)[:, 1]
    if ind is None:
        ind = np.arange(X.shape[0])

    plt.interactive(False)
    signature = hashlib.md5(repr(clf).encode()).hexdigest()
    fname = 'error_report_%s.pdf' % signature
    pdf = PdfPages(fname)

    # frontpage
    fig = plt.figure(figsize=(8.27, 11.69))
    fig.text(0.5, .9, "Error Report", horizontalalignment='center', size=20)
    fig.text(0.5, .75, str(datetime.now()), horizontalalignment='center', size=12)
    fig.text(0.5, .5, pprint.pformat(clf), horizontalalignment='center', size=10)
    plt.savefig(pdf, format='pdf')
    plt.close()

    # roc curve
    print('_' * 80)
    print('roc curve')
    print()
    fig = plt.figure(figsize=(8.27, 8.27))
    _plot_roc(y, y_scores, fig.gca())
    plt.savefig(pdf, format='pdf')
    plt.close()

    # spectrograms of top false positives, false negatives,
    # true positives and true negatives
    for error_type in ('fp', 'fn', 'tp', 'tn'):
        fig = plt.figure(figsize=(8.27, 8.27))
        _plot_errors(X, ind, y, y_scores, pdf, spec_func=spec_func,
                     type=error_type, k=20)
        plt.savefig(pdf, format='pdf')
        plt.close()

    pdf.close()
    plt.interactive(True)
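# Minimal usage sketch with synthetic data (an illustration, not part of the
# original module); spec_func is omitted, so default spectrogram handling applies.
import numpy as np
from sklearn.linear_model import LogisticRegression

rng = np.random.RandomState(0)
X_demo = rng.randn(200, 10)
y_demo = (X_demo[:, 0] > 0).astype(int)
clf_demo = LogisticRegression().fit(X_demo, y_demo)
error_report(clf_demo, X_demo, y_demo)  # writes error_report_<md5 of repr(clf)>.pdf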
#for x in table.where('(sId == currSubId)') ] vec = [ [x['sStart'],x['sEnd'],x['dIdent'] ] \ for x in table.where('(gi == currGi) & (sId == currSubId)') ] print "num records = ", len(vec) minSStart = min([r[0] for r in vec]) maxSEnd = max([r[1] for r in vec]) print "min s.start, max s.end = ", minSStart, maxSEnd h5file.close() #### #### Plotting #### fig = pylab.figure(1) ax = pylab.subplot(111) pylab.interactive(False) ### ### Gen gradient color code ### t0 = default_timer() cIndex = 0 xpairs = [] ypairs = [] for i in range(0, len(vec)): xs = vec[i][0] xe = vec[i][1] y = vec[i][2] x2 = [xs, xe] y2 = [y, y]
data = (2e-3*x + 2.0) + data ################################################################### ################ FITTING SECOND DERIVATIVE SPECTRA ################ ################################################################### intensity1 = [] position1 = [] linewidth1=[] fits1 = [] bp = [] positions = [] second_derivative_spectra = [] x_values = [] pylab.interactive(False) ## smoothing algorithms for real data ## #data = triangular(data,10) #data = savitzky_golay(data,11,order=3) ## calculating the second derivative ## x_fit1 = [] x_fit2 = [] for j in range(len(x)-1): x_fit1.append((x[j]+x[j+1])/2) for j in range(len(x_fit1)-1): x_fit2.append((x_fit1[j]+x_fit1[j+1])/2) first_der_spec = basic_num_diff(x,data) second_der_spec = numpy.array(basic_num_diff(x_fit1,first_der_spec))
        time.sleep(self.ontime)
        GPIO.output("P9_24", 0)
        off = time.time()
        self.ot = off - wake
        self.error = (self.sleeptime > (self.delay + 0.05)) or (self.ot > (self.ontime + 0.05))
        self.request.clear()

    def notify(self):
        # video thread calls this to ask for another LED pulse.
        self.request.set()


lt = LedThread(delay=0.000, ontime=0.008)
at = AcquireThread(lt)
lt.start()
at.start()

from pylab import interactive, imshow, plot
interactive(True)

def show(img):
    """ make an 8 bit copy of the image. swap the red and the blue pixels.
        this will mess up the timing of the other threads """
    img = clip(img, 0, 255)
    icop = array(img, dtype=uint8)
    icop[:, :, 0] = img[:, :, 2]
    icop[:, :, 2] = img[:, :, 0]
    imshow(icop, interpolation="nearest")
# Changing plot limits:
import pylab as plb

plb.figure(figsize=(6, 3), dpi=100)
d = plb.linspace(-plb.pi * 2, plb.pi * 2, 128, endpoint=True)
d_sin = plb.sin(d)
d_cos = plb.cos(d)

# we now set the x,y limits for the 'sin' function
plb.subplot(2, 1, 1)
plb.plot(d, d_sin, color="blue", linewidth=1.2, linestyle="-", label="sin")
plb.legend(loc="upper right")
plb.xlim(d_sin.min() * 6.5, d_sin.max() * 6.5)
plb.ylim(d_sin.min() * 1.2, d_sin.max() * 1.2)
plb.xticks([-plb.pi * 2, -plb.pi, 0, plb.pi, plb.pi * 2],
           [r'$-2\pi$', r'$-\pi$', r'$0$', r'$+\pi$', r'$+2\pi$'])
plb.yticks([-1, 0, 1], [r'$-1$', r'$0$', r'$+1$'])
plb.title('Plot of Sin and Cos functions')

# below we set the x,y limits for the 'cos' function
plb.subplot(2, 1, 2)
plb.plot(d, d_cos, color="red", linewidth=1, linestyle="--", label="cos")
plb.legend(loc='lower right')
plb.xlim(d_cos.min() * 6.5, d_cos.max() * 6.5)
plb.ylim(d_cos.min() * 1.2, d_cos.max() * 1.2)

plb.interactive(True)
plb.pause(10)
plb.show()
def ex_plot_cg_ppi(): pl.interactive(True) # load a polar scan and create range and azimuth arrays accordingly data = np.loadtxt( os.path.dirname(__file__) + '/' + 'data/polar_dBZ_tur.gz') r = np.arange(0, data.shape[1]) az = np.arange(0, data.shape[0]) # mask data array for better presentation mask_ind = np.where(data <= np.nanmin(data)) data[mask_ind] = np.nan ma = np.ma.array(data, mask=np.isnan(data)) # cgax - curvelinear grid axis # Main axis # caax - twin cartesian axis # secondary axis for cartesian coordinates (plotting, labeling etc.) # paax - polar axis for plotting # here all plotting in polar data is done # pm - pcolormesh # actual plot mappable # Remark #1: # The tight_layout function is great, but may not lead to # satisfactory results in the first place. So labels, annotations # and/or axes may need adjustment # Remark #2: # This examples makes heavy use of new matlotlib functionality. See # function help for more information. #---------------------------------------------------------------- # First, creation of four simple figures # figure #1 # the simplest call, plot cg ppi in new window # plot simple CG PPI wradlib.vis.plot_cg_ppi(ma, refrac=False) t = pl.title('Simple CG PPI') t.set_y(1.05) pl.tight_layout() #---------------------------------------------------------------- # figure #2 # now let's just plot a sector of data # for this, we need to give the ranges and azimuths explicitly # and one more than we pass on in the data, because we also may not use # the autoext-feature, and otherwise the last row and column of our data # would not be plotted cgax, caax, paax, pm = wradlib.vis.plot_cg_ppi(ma[200:250, 40:80], r[40:81], az[200:251], autoext=False, refrac=False) t = pl.title('Sector CG PPI') t.set_y(1.05) pl.tight_layout() # plot some additional polar and cartesian data # cgax and caax plot both cartesian data # paax plots polar data # plot on cartesian axis caax.plot(-60, -60, 'ro', label="caax") caax.plot(-50, -70, 'ro') # plot on polar axis xx, yy = np.meshgrid(230, 90) paax.plot(xx, yy, 'bo') paax.plot(220, 90, 'bo', label="paax") # plot on cg axis (same as on cartesian axis) cgax.plot(-50, -60, 'go', label="cgax") # legend on main cg axis cgax.legend() #---------------------------------------------------------------- # figure #3 # now let's plot with given range and theta arrays # and plot some annotation and colorbar cgax, caax, paax, pm = wradlib.vis.plot_cg_ppi(ma, r, az, autoext=True, refrac=False) t = pl.title('Decorated CG PPI') t.set_y(1.05) cbar = pl.gcf().colorbar(pm, pad=0.075) caax.set_xlabel('x_range [km]') caax.set_ylabel('y_range [km]') pl.text(1.0, 1.05, 'azimuth', transform=caax.transAxes, va='bottom', ha='right') cbar.set_label('reflectivity [dBZ]') pl.tight_layout() #---------------------------------------------------------------- # figure #4 # now let's just plot a sector of data # and plot some annotation and colorbar # create an floating axis for range cgax, caax, paax, pm = wradlib.vis.plot_cg_ppi(ma[200:250, 40:80], r[40:81], az[200:251], autoext=False, refrac=False) t = pl.title('Decorated Sector CG PPI') t.set_y(1.05) cbar = pl.gcf().colorbar(pm, pad=0.075) caax.set_xlabel('x_range [km]') caax.set_ylabel('y_range [km]') pl.text(1.0, 1.05, 'azimuth', transform=caax.transAxes, va='bottom', ha='right') cbar.set_label('reflectivity [dBZ]') cgax.axis["lat"] = cgax.new_floating_axis(0, 240) cgax.axis["lat"].set_ticklabel_direction('-') cgax.axis["lat"].label.set_text("range [km]") cgax.axis["lat"].label.set_rotation(180) 
cgax.axis["lat"].label.set_pad(10) pl.tight_layout() #---------------------------------------------------------------- # figure #5 # plot figure #1-4 in one figure 2x2 grid pl.figure() # figure #5-1 # the simplest call, plot cg ppi in new window # plot simple CG PPI wradlib.vis.plot_cg_ppi(ma, refrac=False, subplot=221) t = pl.title('Simple CG PPI') t.set_y(1.05) pl.tight_layout() #---------------------------------------------------------------- # figure #5-2 # now let's just plot a sector of data # for this, we need to give the ranges and azimuths explicitly # and one more than we pass on in the data, because we also may not use # the autoext-feature, and otherwise the last row and column of our data # would not be plotted cgax, caax, paax, pm = wradlib.vis.plot_cg_ppi(ma[200:250, 40:80], r[40:81], az[200:251], autoext=False, refrac=False, subplot=222) t = pl.title('Sector CG PPI') t.set_y(1.05) pl.tight_layout() #---------------------------------------------------------------- # figure #5-3 # now let's plot with given range and theta arrays # and plot some annotation and colorbar cgax, caax, paax, pm = wradlib.vis.plot_cg_ppi(ma, r, az, autoext=True, refrac=False, subplot=223) t = pl.title('Decorated CG PPI') t.set_y(1.05) cbar = pl.gcf().colorbar(pm, pad=0.075) caax.set_xlabel('x_range [km]') caax.set_ylabel('y_range [km]') pl.text(1.0, 1.05, 'azimuth', transform=caax.transAxes, va='bottom', ha='right') cbar.set_label('reflectivity [dBZ]') pl.tight_layout() #---------------------------------------------------------------- # figure #5-4 # now let's just plot a sector of data # and plot some annotation and colorbar # create an floating axis for range cgax, caax, paax, pm = wradlib.vis.plot_cg_ppi(ma[200:250, 40:80], r[40:81], az[200:251], autoext=False, refrac=False, subplot=224) t = pl.title('Decorated Sector CG PPI') t.set_y(1.05) cbar = pl.gcf().colorbar(pm, pad=0.075) caax.set_xlabel('x_range [km]') caax.set_ylabel('y_range [km]') pl.text(1.0, 1.05, 'azimuth', transform=caax.transAxes, va='bottom', ha='right') cbar.set_label('reflectivity [dBZ]') cgax.axis["lat"] = cgax.new_floating_axis(0, 240) cgax.axis["lat"].set_ticklabel_direction('-') cgax.axis["lat"].label.set_text("range [km]") cgax.axis["lat"].label.set_rotation(180) cgax.axis["lat"].label.set_pad(10) pl.tight_layout() #---------------------------------------------------------------- # figure #6 # create figure with GridSpec pl.figure() gs = gridspec.GridSpec(5, 5) cgax, caax, paax, pm = wradlib.vis.plot_cg_ppi(ma, refrac=False, subplot=gs[0:3, 0:3]) cgax, caax, paax, pm = wradlib.vis.plot_cg_ppi(ma, refrac=False, subplot=gs[0:3, 3:5]) cgax, caax, paax, pm = wradlib.vis.plot_cg_ppi(ma, refrac=False, subplot=gs[3:5, 0:3]) cgax, caax, paax, pm = wradlib.vis.plot_cg_ppi(ma, refrac=False, subplot=gs[3:5, 3:5]) t = pl.gcf().suptitle('GridSpec CG Example') pl.tight_layout() #---------------------------------------------------------------- # figure #7 # create figure with co-located x and y-axis # using axesgrid1 toolkit x = np.random.randn(ma.shape[1]) y = np.random.randn(ma.shape[1]) cgax, caax, paax, cgpm = wradlib.vis.plot_cg_ppi( ma, refrac=False, ) divider = make_axes_locatable(cgax) axHistX = divider.append_axes("top", size=1.2, pad=0.1, sharex=caax) axHistY = divider.append_axes("right", size=1.2, pad=0.1, sharey=caax) # make some labels invisible axHistX.xaxis.set_major_formatter(NullFormatter()) axHistY.yaxis.set_major_formatter(NullFormatter()) axHistX.hist(x) if not pl.matplotlib.__version__ == "1.2.1": # There is a bug 
in matplotlib 1.2.1, # see https://github.com/matplotlib/matplotlib/pull/1985 axHistY.hist(y, orientation='horizontal') else: axHistY.text(0.5, 0.5, "Does not work with\nmatplotlib 1.2.1", horizontalalignment="center", rotation=90, fontsize=15, color="red") t = pl.gcf().suptitle('AxesDivider CG Example') pl.tight_layout() pl.show()
def maps_from_echse(conf): """Produces time series of rainfall maps from ECHSE input data and catchment shapefiles. """ # Read sub-catchment rainfall from file fromfile = np.loadtxt(conf["f_data"], dtype="string", delimiter="\t") if len(fromfile)==2: rowix = 1 elif len(fromfile)>2: rowix = slice(1,len(fromfile)) else: raise Exception("Data file is empty: %s" % conf["f_data"]) var = fromfile[rowix,1:].astype("f4") dtimes = fromfile[rowix,0] dtimes = np.array([wradlib.util.iso2datetime(dtime) for dtime in dtimes]) dtimesfromconf = wradlib.util.from_to(conf["tstart"], conf["tend"], conf["interval"]) dtimes = np.intersect1d(dtimes, dtimesfromconf) if len(dtimes)==0: print "No datetimes for mapping based on intersection of data file and config info." return(0) # objects = fromfile[0,1:] cats = plt.genfromtxt(conf["f_coords"], delimiter="\t", names=True, dtype=[('id', '|S20'), ('lat', 'f4'), ('lon', 'f4'), ('x', 'f4'), ('y', 'f4')]) mapx, mapy = wradlib.georef.reproject(cats["x"],cats["y"], projection_source=conf["trg_proj"], projection_target=conf["map_proj"]) # Read shapefile dataset, inLayer = wradlib.io.open_shape(conf["f_cats_shp"]) polys, keys = wradlib.georef.get_shape_coordinates(inLayer, key='DN') keys = np.array(keys) # Preprocess polygons (remove minors, sort in same order as in coords file) polys2 = [] for i, id in enumerate(cats["id"]): keyix = np.where( keys==eval(id.strip("cats_")) )[0] if len(keyix) > 1: # More than one key matching? Find largest matching polygon keyix = keyix[np.argmax([len(polys[key]) for key in keyix])] else: keyix = keyix[0] poly = polys[keyix].copy() if poly.ndim==1: # Multi-Polygons - keep only the largest polygon # (just for plotting - no harm done) poly2 = poly[np.argmax([len(subpoly) for subpoly in poly])].copy() else: poly2 = poly.copy() polys2.append ( wradlib.georef.reproject(poly2, projection_source=conf["trg_proj"], projection_target=conf["map_proj"]) ) colors = plt.cm.spectral(np.linspace(0,1,len(conf["levels"]))) mycmap, mynorm = from_levels_and_colors(conf["levels"], colors, extend="max") plt.interactive(False) for i, dtime in enumerate(dtimes): datestr = (dtime-dt.timedelta(seconds=conf["interval"])).strftime("%Y%m%d.png") print datestr figpath = os.path.join(conf["savefigs"], datestr) fig = plt.figure(figsize=(6,6)) ax = fig.add_subplot(111, aspect="equal") ax, coll = plot_cats(polys2, var[i], ax=ax, bbox=conf["bbox"], cmap=mycmap, norm=mynorm, edgecolors='none') cb = plt.colorbar(coll, ax=ax, ticks=conf["levels"], shrink=0.6) cb.ax.tick_params(labelsize="small") cb.set_label("(mm)") plt.xlabel("Longitude") plt.ylabel("Latitude") plot_trmm_grid_lines(ax) plt.text(conf["bbox"]["left"]+0.25, conf["bbox"]["top"]-0.25, "%s\n%s to\n%s" % (conf["figtxtbody"], (dtime-dt.timedelta(seconds=conf["interval"])).isoformat(" "), dtime.isoformat(" ") ), color="red", fontsize="small", verticalalignment="top") plt.tight_layout() plt.savefig(figpath) plt.close() plt.interactive(True)
def parse_and_plot_ref(runfile, spectrum_file): fields = [('wl', 'f8'), ('gf', 'f8'), ('z', 'i'), ('istg', 'i'), ('chi', 'f8')] ref = N.loadtxt("ref.dat", dtype=fields) model = N.loadtxt(spectrum_file) mylist = parse_runsynow(runfile) numref = mylist['parms']['numref'] an = [] ai = [] for x,y,z in zip(mylist['parms']['tau1'][:numref],\ mylist['parms']['an'][:numref],\ mylist['parms']['ai'][:numref]): if x > 0.: an.append(y) ai.append(z) ions_used = [z * 100 + istg for z, istg in zip(an, ai)] ref_ions = [] for i in xrange(N.size(ref['wl'])): ref_ions.append(ref['z'][i] * 100 + ref['istg'][i]) ref_index = [] for ion in ions_used: ref_index.append(ref_ions.index(ion)) pylab.interactive(True) # One can supply an argument to AutoMinorLocator to # specify a fixed number of minor intervals per major interval, e.g.: # minorLocator = AutoMinorLocator(2) # would lead to a single minor tick between major ticks. minorLocator = AutoMinorLocator() golden = (pylab.sqrt(5) + 1.) / 2. figprops = dict(figsize=(8., 8. / golden), dpi=128) # Figure properties for single and stacked plots # figprops = dict(figsize=(16., 8./golden), dpi=128) # Figure properties for side by sides adjustprops = dict(left=0.15, bottom=0.1, right=0.90, top=0.93, wspace=0.2, hspace=0.2) # Subp fig = pylab.figure(1, **figprops) # New figure fig.clf() fig.subplots_adjust(**adjustprops) # Tunes the subplot layout ax1 = fig.add_subplot(1, 1, 1) my_funcs.bold_labels(ax1) p1, = ax1.plot(model[:, 0], model[:, 1], linewidth=2.0) ax1.set_ylabel(r'$F_\lambda$', fontsize=14) ax1.set_xlabel(r'$\lambda\ (\AA)$', fontsize=14) # ax1.set_xlim([0.,60.]) # ax1.set_ylim([10.**41.4,10.**43.5]) # ax1.set_yscale('log') # ax1.legend([p1,p2,p3,p4],['Day 10','Day 15','Day 25','Day 50'],frameon=False) ax1.xaxis.set_minor_locator(minorLocator) pylab.tick_params(which='both', width=2) pylab.tick_params(which='major', length=7) pylab.tick_params(which='minor', length=4, color='r') #ax1.xaxis.grid(True,which='minor') ax1.xaxis.grid(True, which='both') wl_ref = [] f_ref = [] ymin, ymax = ax1.get_ybound() for i in ref_index: wl_ref.append([10. * ref['wl'][i], 10. * ref['wl'][i]]) ihelp = N.abs(model[:, 0] - 10. * ref['wl'][i]).argmin() yhelp = model[ihelp, 1] f_ref.append([ymin, yhelp]) for x, y in zip(wl_ref, f_ref): ax1.plot(x, y, lw=2) fields = [('Z','i'),('A','f8'),('Name','S13'),('sym','S4'),('MP','f8'),\ ("BP",'f8'),('rho','f8'),('crust','f8'),('year','i'),\ ('group','i'), ('config','S23'), ('chiion',"f8")] # labels = N.loadtxt("periodic_table.dat",skiprows=1,delimiter=',',dtype=fields) labels = N.genfromtxt("periodic_table.dat", skip_header=1, delimiter=',', dtype=None) syms = [] for x in labels['f3']: syms.append(x.replace(" ", "")) ref_Zs = [] for z in labels['f0']: ref_Zs.append(z) sym_indices = [] for z in an: sym_indices.append(ref_Zs.index(z)) spect_notation = [ "I", "II", "III", "IV", "V", "VI", "VII", "VIII", "IX", "X" ] text_labels = [] for i, j in enumerate(sym_indices): help = syms[j] + " " + spect_notation[ai[i]] text_labels.append(help) for x, y, l in zip(wl_ref, f_ref, text_labels): ax1.text(x[0], min(y[1] * 1.08, ymax), l, fontsize=8)
After retrieving the parameters for the two models, this script will plot the results. IN: alpha11, mu11, sigma11, alpha12, mu12, sigma12 """ alpha1 = float(argv[1]) mu1 = float(argv[2]) sigma1 = float(argv[3]) alpha2 = float(argv[4]) mu2 = float(argv[5]) sigma2 = float(argv[6]) alpha1_sqrt_pi_sigma1 = (alpha1 / (sqrt(2 * pi) * sigma1)) alpha1_sqrt_pi_sigma2 = (alpha2 / (sqrt(2 * pi) * sigma2)) time = pl.linspace(0, 30) pl.interactive(False) def normal_distribution(k): length = len(k) result = np.zeros(length) for i in range(0, length): result[i] = alpha1_sqrt_pi_sigma2 * (e**((-0.5) * pow( (k[i] - mu2) / sigma2, 2))) return result def upside_down_normal_distribution(k): length = len(k) result = np.zeros(length) for i in range(0, length):
""" Communication with coppeliasim\n Forked from https://github.com/dsaldana/CSE360-MobileRobotics """ import sim import time import numpy as np from numpy import array import pylab from math import pi pylab.interactive(True) # Put these in __init__()? sim.simxFinish(-1) # Close opened connections clientID = sim.simxStart('127.0.0.1', 19999, True, True, 5000, 5) # Connect to CoppeliaSim class robot(): def __init__(self, frame_name, motor_names=[], client_id=0): # If there is an existing connection if client_id: self.client_id = client_id else: self.client_id = self.open_connection() self.motors = self._get_handlers(motor_names) # Robot frame self.frame = self._get_handler(frame_name)
def fe55_gain_fitter(signals, ccdtemp=-95, make_plot=False, xrange=None, bins=100, hist_nsig=10, title='', plot_filename=None, interactive=True, ylog=True): """ Function to fit the distribution of charge cluster DN values from a Fe55 dataset. A two Gaussian model of Mn K-alpha and K-beta lines is assumed with the ratio between the K-alpha and K-beta energies fixed at 5.889/6.49 and the the Gaussian width of the lines set equal. The gain (Ne/DN), location and sigma of the K-alpha peak (in units of DN) are returned as a tuple. If make_plot=True, then a matplotlib plot of the distribution and fit is displayed. If xrange is not None, then that 2-element tuple is used as the histogram x-range. If xrange is None, then the histogram x-range is set to +/- hist_nsig*clipped_stdev about the median of the signal distribution. """ flags = afwMath.MEDIAN | afwMath.STDEVCLIP try: stats = afwMath.makeStatistics(signals.tolist(), flags) except: print(signals) raise median = stats.getValue(afwMath.MEDIAN) stdev = stats.getValue(afwMath.STDEVCLIP) if xrange is None: # Set range of histogram to include both Kalpha and Kbeta peaks. xmin = max(median - hist_nsig * stdev, 200) xmax = min(median * 1785. / 1620. + hist_nsig * stdev, 1000) xrange = xmin, xmax # Save pylab interactive state. pylab_interactive_state = pylab.isinteractive() # Determine distribution mode and take that as the location of the # Kalpha peak hist = np.histogram(signals, bins=bins, range=xrange) xpeak = hist[1][np.where(hist[0] == max(hist[0]))][0] xrange = max(0, xpeak - 200), xpeak * 1785. / 1620. + 200 hist = np.histogram(signals, bins=bins, range=xrange) yrange = 1, max(hist[0]) * 1.5 if make_plot: if interactive: pylab.ion() else: pylab.ioff() # fig = pylab.figure() # axes = fig.add_subplot(111) win = plot.Window() hist = pylab.hist(signals, bins=bins, range=xrange, histtype='bar', color='b', log=ylog) if ylog: plot.setAxis(xrange, yrange) else: pylab.ioff() # hist = np.histogram(signals, bins=bins, range=xrange) x = (hist[1][1:] + hist[1][:-1]) / 2. y = hist[0] ntot = sum(y) # # Starting values for two Gaussian fit. The relative # normalizations are initially set at the expected line ratio # of K-alpha/K-beta = 0.88/0.12. The relative peak locations # and relative widths are fixed in fe55_lines(...) above. # p0 = (ntot * 0.88, median, stdev / 2., ntot * 0.12) pars, _ = scipy.optimize.curve_fit(fe55_lines, x, y, p0=p0) kalpha_peak, kalpha_sigma = pars[1], pars[2] fe55_yield = Fe55Yield(ccdtemp) gain = fe55_yield.alpha()[0] / kalpha_peak if make_plot: pylab.xlabel('Bias Corrected Event Signal (DN)') pylab.ylabel('Entries / bin') xx = np.linspace(x[0], x[-1], 1000) pylab.plot(xx, fe55_lines(xx, *pars), 'r--', markersize=3, linewidth=1) pylab.annotate(("K-alpha peak = %i DN\n\n" + "Gain = %.2f e-/DN\n\n") % (kalpha_peak, gain), (0.5, 0.7), xycoords='axes fraction') win.set_title(title) if plot_filename is not None: pylab.savefig(plot_filename) # Restore pylab interactive state. pylab.interactive(pylab_interactive_state) return gain, kalpha_peak, kalpha_sigma
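# The fitter above relies on a fe55_lines(x, *pars) model that is not shown in
# this snippet. Below is a hedged sketch consistent with the docstring and the
# starting values p0 = (kalpha_norm, kalpha_peak, kalpha_sigma, kbeta_norm):
# two Gaussians with the K-beta position tied to K-alpha by the 6.49/5.889
# energy ratio and equal widths. The normalisation convention is a guess, not
# the original implementation.
import numpy as np

def fe55_lines(x, kalpha_norm, kalpha_peak, kalpha_sigma, kbeta_norm):
    kbeta_peak = kalpha_peak * 6.49 / 5.889   # fixed K-beta/K-alpha energy ratio
    kbeta_sigma = kalpha_sigma                # equal Gaussian widths
    kalpha = (kalpha_norm / (np.sqrt(2. * np.pi) * kalpha_sigma)
              * np.exp(-(x - kalpha_peak) ** 2 / (2. * kalpha_sigma ** 2)))
    kbeta = (kbeta_norm / (np.sqrt(2. * np.pi) * kbeta_sigma)
             * np.exp(-(x - kbeta_peak) ** 2 / (2. * kbeta_sigma ** 2)))
    return kalpha + kbeta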
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 13 09:28:40 2018

@author: geih
"""
# Initialize the model and define default options
import numpy as np
import neuron
nrn = neuron.h
import uncertainpy as un
import pylab as plt
plt.interactive(1)
plt.show()


def create_soma(
        g_l=2e-5,
        e_pas=-45,
        g_K=4.18e-4,
        pcabar_ihva=0.2e-3,
        g_SK=4e-4,
        g_BK=3.13e-4,
        gbar_naxm=2.19e-2,
        tau_BK=3,
        tau_K=5,
):
save_bases = False save_ws = False debug_data = {} cTrial=1 cStep=0 ################ TRAJECTORY ANALISYS breakCounter=0 # nTrials=2 # breaksxTrial=[np.zeros(nTrials)] proximity6=0 # leastProximityxTrial=[np.zeros(nTrials)] pl.ion() pl.interactive(True) # fig=pl.figure(figsize=(10,6)) # pl.subplots_adjust(hspace=.7) # pl.subplot(1,1,1) # # pl.title("Breaks per Trial") # line1, = pl.plot(np.arange(0,nTrials),'*') # pl.xlim([0,nTrials]) # pl.ylim([0,100]) # pl.subplot(2,1,2) # pl.title("IR izquierdo") # line2, = pl.plot(np.arange(0,nTrials)) #Saving first CS-US CS_US_1 = {}
Inspired from opencv_source_code/samples/python2/lk_track.py
-----
Author : Romain Trachel <*****@*****.**>
Date   : 02/19/2015
'''

import numpy as np
import cv2
import time
from scipy import interpolate, signal, fftpack, optimize
from sklearn.decomposition import PCA
import pylab as plt
plt.interactive(True)

# hard coded parameters (beark... some of them need to be passed into init)
# parameters of the Lucas-Kanade optical flow algorithm
lk_params = dict(winSize=(35, 35),
                 maxLevel=2,
                 criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03))

# parameters of the feature tracking algorithm
feature_params = dict(maxCorners=500,
                      qualityLevel=0.35,  # decrease sensitivity
                      minDistance=7,
                      blockSize=7)

# parameters of the face tracking algorithm
face_params = dict(scaleFactor=1.1,
def plot(self, xrange=None, interactive=False, bins=100, win=None, subplot=(1, 1, 1), figsize=None, add_labels=False, frameLabels=False, amp=1, title=''): pylab_interactive_state = pylab.isinteractive() pylab.interactive(interactive) if win is None: if frameLabels: xlabel = 'Bias Corrected Event Signal (DN)' ylabel = 'Entries / bin' else: xlabel, ylabel = None, None win = plot.Window(subplot=subplot, figsize=figsize, xlabel=xlabel, ylabel=ylabel, size='large') else: win.select_subplot(*subplot) if frameLabels: bbox = win.axes[-1].get_position() points = bbox.get_points() points[0] += 0.025 points[1] += 0.025 bbox.set_points(points) win.axes[-1].set_position(bbox) if xrange is not None: self.xrange = xrange logscale = True if max(self.signals) <= 0: logscale = False try: hist = pylab.hist(self.signals, bins=bins, range=self.xrange, histtype='bar', color='b', log=logscale) yrange = 1, max(hist[0]) * 1.5 plot.setAxis(self.xrange, yrange) except: return win if add_labels: pylab.xlabel('Bias Corrected Event Signal (DN)') pylab.ylabel('Entries / bin') x = (hist[1][1:] + hist[1][:-1]) / 2. xx = np.linspace(x[0], x[-1], 1000) pylab.plot(xx, fe55_lines(xx, *self.pars), 'r--', markersize=3, linewidth=1) pylab.annotate(("Amp %i\nGain=%.2f e-/DN") % (amp, self.gain), (0.475, 0.8), xycoords='axes fraction', size='x-small') pylab.interactive(pylab_interactive_state) return win
#!/usr/bin/env python
import sys, os, re
import pylab as p
p.interactive(True)

def mm_array(x, y):
    return p.array([x, y]) / 1000.0

def twoDang(ang):
    if isinstance(ang, p.ndarray):
        ang = p.arctan2(ang[1], ang[0])
    return ang

def hypleg(x, y=None):
    if y is None and isinstance(x, p.ndarray):
        x, y = x
    return p.sqrt(p.fabs(p.norm(x)**2 - p.norm(y)**2))

def twoDmag_ang(mag, ang):
    mag = p.norm(mag)
    ang = twoDang(ang)
    return p.array([mag * p.cos(ang), mag * p.sin(ang)])

def propkin(pp):
    pa = p.array([p.array(x) for x in pp])
    ppt = [p.sum(pa[:i + 1], 0) for i in range(len(pa))]
    pxy = p.array(zip(*ppt))
    #print pxy
    return pxy
Global plots for the LSST white paper.
"""
import os
import os.path as op
import logging
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG)
from exceptions import ValueError
from argparse import ArgumentParser
import numpy as np
import matplotlib
matplotlib.use('Agg')
import pylab as pl
pl.interactive(0)
import healpy as hp
from saunerie import psf

#CADENCE_FILES = ['alt_sched.npy', 'alt_sched_rolling.npy', 'feature_baseline_10yrs.npy',
#                 'feature_rolling_half_mask_10yrs.npy', 'feature_rolling_twoThird_10yrs.npy',
#                 'minion_1016.npy']
#CADENCE_SHORT_NAMES = ['AltSched', 'AltSchedRolling', 'FeatureBaseline',
#                       'FeatureRolling1/2', 'FeatureRolling2/3',
#                       'Minion']


def _savefig(fig, filename):
    dirname = op.dirname(filename)
    if not op.isdir(dirname):
def __init__(self,x,y,xcen,ycen,d=100e-3,mask=None,gainImg=None,darkImg=None,tx=0,ty=0, qbin=5e-3,lam=1,\ ADU_per_photon = 1.,Pplane=0,phibin=0.1,phiBins=1,img=None,verbose=0,report_file=None): """ correctedImage = (Image-darkImg)/gainImg/geom_correction/pol_correction x,y = pixel coordinate (1D array each); note: they should be the center of the pixels xcen,ycen = center beam position tx,ty = angle of detector normal with respect to incoming beam (in deg) zeros are for perpendicular configuration darkImg = darkImage to subbract ADU_per_photon : used to estimate errors qbin = rebinning q phibin = bin in azimuthal angle (used for polar plot Pplane = Polarization (1 = horizontal, 0 = vertical) d = distance of center of detector to sample (in m) lam = wavelength in Ang img is used only for displaying corrections """ # save parameters for later use self.gainImg=gainImg self.darkImg=darkImg if mask is not None: mask = np.asarray(mask,dtype=np.bool) self.mask=mask self.verbose=verbose self.ADU_per_photon=ADU_per_photon tx = np.deg2rad(tx) ty = np.deg2rad(ty) xcen = float(xcen) ycen = float(ycen) # equations based on J Chem Phys 113, 9140 (2000) [logbook D30580, pag 71] (A,B,C) = (-np.sin(ty)*np.cos(tx),-np.sin(tx),-np.cos(ty)*np.cos(tx)) (a,b,c) = (xcen+d*np.tan(ty),float(ycen)-d*np.tan(tx),d) self.xcen = xcen self.ycen = ycen mshape = x.shape r = np.sqrt( (x-a)**2+(y-b)**2+c**2) self.r = r self.d = d self.msg("calculating theta...",cr=0) matrix_theta = np.arccos( (A*(x-a)+B*(y-b)-C*c )/r ) self.matrix_theta = matrix_theta self.msg("...done") self.msg("calculating phi...",cr=0) matrix_phi = np.arccos( ((A**2+C**2)*(y-b)-A*B*(x-a)+B*C*c )/ \ np.sqrt((A**2+C**2)*(r**2-(A*(x-a)+B*(y-b)-C*c)**2))) idx = (y>ycen) & (np.isnan(matrix_phi)) matrix_phi[idx] = 0 idx = (y<ycen) & (np.isnan(matrix_phi)) matrix_phi[idx] = np.pi idx = (x<xcen) matrix_phi[idx] = (np.pi-matrix_phi[idx])+np.pi # matrix_phi[idx] = temp+n.pi self.matrix_phi = matrix_phi self.msg("...done") self.msg("calculating pol matrix...",cr=0) Pout = 1-Pplane pol = Pout*(1-(np.sin(matrix_phi)*np.sin(matrix_theta))**2)+\ Pplane*(1-(np.cos(matrix_phi)*np.sin(matrix_theta))**2) self.msg("... done") self.pol=pol theta_max = np.nanmax(matrix_theta[~mask]) self.msg("calculating digitize") self.nphi = phiBins #if phiBins > 1: phiint = 2*np.pi/phiBins pbm = self.matrix_phi + phiint/2 pbm[pbm>=2*np.pi] -= 2*np.pi self.phiVec = np.linspace(0,2*np.pi+np.spacing(np.min(pbm)),phiBins+1) self.idxphi = np.digitize(pbm.ravel(),self.phiVec)-1 self.matrix_q = 4*np.pi/lam*np.sin(self.matrix_theta/2) q_max = np.nanmax(self.matrix_q[~mask]) qbin = np.array(qbin) if qbin.size==1: self.qbins = np.arange(0,q_max+qbin,qbin) else: self.qbins = qbin self.q = (self.qbins[0:-1]+self.qbins[1:])/2 self.theta = 2*np.arcsin(self.q*lam/4/np.pi) self.nq = self.q.size self.idxq = np.digitize(self.matrix_q.ravel(),self.qbins)-1 last_idx = self.idxq.max() self.idxq[mask.ravel()] = 0; # send the masked ones in the first bin # 2D binning! 
self.Cake_idxs = np.ravel_multi_index((self.idxphi,self.idxq),(self.nphi,self.nq)) self.Cake_idxs[mask.ravel()] = 0; # send the masked ones in the first bin #print "last index",last_idx self.msg("...done") #self.phi = np.arange(0,2*np.pi+phibin,phibin)+phibin/2 self.phi = self.phiVec[:-1] # include geometrical corrections geom = (d/r) ; # pixels are not perpendicular to scattered beam geom *= (d/r**2); # scattered radiation is proportional to 1/r^2 self.msg("calculating normalization...",cr=0) self.geom = geom self.geom /= self.geom.max() self.correction = self.geom*self.pol self.Npixel = np.bincount(self.idxq,minlength=self.nq); self.Npixel = self.Npixel[:self.nq] self.norm = self.Npixel self.Cake_Npixel = np.bincount(self.Cake_idxs,minlength=self.nq*self.nphi) #self.Cake_Npixel = self.Npixel[:self.nq*self.nphi] self.Cake_norm=np.reshape(self.Cake_Npixel,(self.nphi,self.nq));#/self.correction1D #self.correction1D =self.correction1D[:self.nq]/self.Npixel self.header = "# Parameters for data reduction\n" self.header += "# xcen,ycen = %.2f m %.2f m\n" % (xcen,ycen) self.header += "# sample det distance = %.4f m\n" % (d) self.header += "# wavelength = %.4f Ang\n" % (lam) self.header += "# detector angles x,y = %.3f,%.3f deg\n" % (np.rad2deg(tx),np.rad2deg(ty)) self.header += "# fraction of inplane pol %.3f\n" % (Pplane) if isinstance(qbin,float): self.header += "# q binning : %.3f Ang-1\n" % (qbin) return if report_file is None: return else: # prepare report if (img is None): img=np.ones_like(mask) plt.interactive(0) plt.figure(figsize=(8*2, 6*2),dpi=150) plt.subplot("231",title="Polarization") plt.imshow(self.pol) plt.colorbar() plt.subplot("232",title="Geometrical") plt.imshow(self.geom) plt.colorbar() plt.subplot("233",title="Geometrical+Pol") plt.imshow(self.correction) plt.colorbar() plt.subplot("234",title="Raw image") plt.imshow(img*mask) plt.colorbar() plt.subplot("235",title="Corrected image") plt.imshow(img/self.correction*mask) plt.colorbar() # plt.show() if (report_file == "auto"): report_file="azimuthal_averaging_info.png" plt.savefig(report_file) self.msg("...done")
def toggle_interactive(self):
    pylab.interactive(not pylab.isinteractive())
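# Several snippets in this collection hand-roll the "save the interactive flag,
# turn it off, restore it afterwards" pattern. A small context manager (an
# assumption for illustration, not part of any of these codebases) captures it:
import contextlib
import matplotlib.pyplot as plt

@contextlib.contextmanager
def non_interactive():
    """Temporarily turn interactive mode off, restoring the previous state."""
    was_interactive = plt.isinteractive()
    plt.interactive(False)
    try:
        yield
    finally:
        plt.interactive(was_interactive)

# Usage: batch-save figures without windows popping up.
# with non_interactive():
#     plt.plot([0, 1], [0, 1])
#     plt.savefig("batch_output.png")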
#!/usr/bin/python3
import serial
import contextlib
import numpy
import pylab
import struct
import threading

HEADER = b"-=-=-=-=\r\n"
FIELD_DELIMITER = ','
FIELD_DEF_DELIMITER = ':'
DATAPOINTS = 20000  # number of datapoints on graph

pylab.interactive(True)
colors = iter('rgbcmyk')


class Field(object):
    def __init__(self, field_def, parent):
        self.name, self.fmt, self._type = field_def.split(FIELD_DEF_DELIMITER)
        self.size = struct.calcsize(self.fmt)
        self.parent = parent
        self.overflow_count = 0
        self.last_value = 0
        self.line = None  # set in create_series (need to clean this up)
        self.create_series()

    @property
    def max_value(self):
        # does not take signed into account, because rollover doesn't make sense
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 12 14:28:55 2016

@author: ericgrimson
"""
import pylab as plt
plt.interactive(False)


def retire(monthly, rate, terms):
    savings = [0]
    base = [0]
    monthlyRate = rate / 12
    for i in range(terms):
        base += [i]
        savings += [savings[-1] * (1 + monthlyRate) + monthly]
    return base, savings


def displayRetirementWithMonthlies(monthlies, rate, terms):
    plt.figure('retireMonth')
    plt.clf()
    for monthly in monthlies:
        xvals, yvals = retire(monthly, rate, terms)
        plt.plot(xvals, yvals, label='retire:' + str(monthly))
    plt.legend(loc='upper left')
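# Example call with illustrative numbers (not from the original file): compare
# monthly contributions of 500-1100 at a 5% annual rate over 40 years
# (480 monthly terms). interactive(False) above means show() blocks here.
displayRetirementWithMonthlies([500, 700, 900, 1100], 0.05, 40 * 12)
plt.show()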
def test_smoke(): import pylab as pl pl.interactive(False) meat = pd.read_csv(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'exampledata', 'meat.csv')) meat['date'] = pd.to_datetime(meat.date) df = pd.DataFrame({ "x": np.arange(0, 100), "y": np.arange(0, 100), "z": np.arange(0, 100) }) df['cat'] = np.where(df.x*2 > 50, 'blah', 'blue') df['cat'] = np.where(df.y > 50, 'hello', df.cat) df['cat2'] = np.where(df.y < 15, 'one', 'two') df['y'] = np.sin(df.y) gg = ggplot(aes(x="x", y="z", color="cat", alpha=0.2), data=df) gg = ggplot(aes(x="x", color="c"), data=pd.DataFrame({"x": np.random.normal(0, 1, 10000), "c": ["blue" if i%2==0 else "red" for i in range(10000)]})) #print gg + geom_density() + xlab("x label") + ylab("y label") gg = ggplot(aes(x="x", y="y", shape="cat2", color="cat"), data=df) #print gg + geom_point() + facet_wrap(x="cat", y="cat2") #print gg + geom_point() + facet_wrap(y="cat2") + ggtitle("My Single Facet") #print gg + stat_smooth(color="blue") + ggtitle("My Smoothed Chart") #print gg + geom_hist() + ggtitle("My Histogram") #print gg + geom_point() + geom_vline(x=50, ymin=-10, ymax=10) #print gg + geom_point() + geom_hline(y=50, xmin=-10, xmax=10) df['z'] = df['y'] + 100 gg = ggplot(aes(x='x', ymax='y', ymin='z'), data=df) #print gg + geom_bar() + facet_wrap(x="cat2") #print gg + geom_area() + facet_wrap(x="cat2") gg = ggplot(aes(x='x', ymax='y', ymin='z', color="cat2"), data=df) #print gg + geom_area() df['x'] = np.random.randint(0, 10, 100) df['y'] = np.random.randint(0, 10, 100) gg = ggplot(aes(x='x', y='y', shape='cat', color='cat2'), data=df) #print df.head() #print gg + geom_point() #print gg + stat_bin2d() #print ggplot(aes(x='mpg', fill=True, alpha=0.3), data=mtcars) + \ # geom_density() #plt.show(block=True) #p = ggplot(mtcars, aes(x='wt', y='mpg', colour='factor(cyl)', size='mpg', linetype='factor(cyl)')) #print p + geom_line() + geom_point() # p + geom_point() + geom_line(color='lightblue') + ggtitle("Beef: It's What's for Dinner") + xlab("Date") + ylab("Head of Cattle Slaughtered") meat_lng = pd.melt(meat[['date', 'beef', 'broilers', 'pork']], id_vars=['date']) meat_lng = pd.melt(meat, id_vars=['date']) p = ggplot(aes(x='date', y='value', colour='variable', fill=True, alpha=0.3), data=meat_lng) #print p + geom_density() + facet_wrap("variable") #print(p + geom_line() + facet_wrap("variable")) plt.show(1) # ggsave(p + geom_density(), "densityplot.png") p = ggplot(aes(x="date", y="value", colour="variable", shape="variable"), meat_lng) #print p + geom_point() + facet_grid(y="variable") p = p + stat_smooth(se=False) + geom_point() p = ggplot(aes(x='date', y='beef'), data=meat) # print p + geom_point() + stat_smooth(se=True) #p = ggplot(aes(x='x', y='y', colour='z'), data=diamonds.head(4)) #print p + geom_point() + \ # scale_colour_gradient(low="white", high="red") + \ # facet_wrap("cut") #plt.show(block=True) #p = ggplot(aes(x='x', y='y', colour='z'), data=diamonds.head(1000)) #print p + geom_point() + \ # scale_colour_gradient(low="white", high="red") + \ # facet_grid("cut", "clarity") #plt.show(block=True) #p = ggplot(aes(x='date', y='beef'), data=meat) #print p + geom_point() + scale_x_continuous("This is the X") + scale_y_continuous("Squared", limits=[0, 1500]) #print p + geom_point() + ylim(0, 1500) #gg = ggplot(aes(x='date', y='beef'), data=meat) #print gg + stat_smooth(se=True) #print ggplot(aes(x='date', y='beef'), data=meat) + geom_line() + \ # scale_x_date(labels="%Y-%m-%d") #plt.show(block=True) #p = ggplot(aes(x='carat'), data=diamonds) 
#print p + geom_now_its_art() #print p + geom_density() + facet_grid("cut", "clarity") #plt.show(block=True) p = ggplot(aes(x='factor(cyl)'), data=mtcars) #print(p + geom_bar()) plt.show(block=True) #ggsave(p + geom_bar(), "public/img/mtcars_geom_bar_cyl.png") p = ggplot(aes(x='date_hour', y='pageviews'), data=pageviews) #print(p + geom_point()) plt.show(1)
import serial
import pylab as py
import numpy as np

serialPort = '/dev/ttyUSB0'
baudRate = 9600
fPath = 'output.csv'

py.interactive(True)
outFile = open(fPath, 'w')
ser = serial.Serial(serialPort, baudRate)

data = []
number = np.array(range(0, 1000))
i = 0
while True:
    i += 1
    line = ser.readline()
    try:
        line = line.decode()
    except UnicodeDecodeError:
        print("decode", line)
        continue
    #outFile.write(line)
    try:
        #line=int(line)
        data.append(int(line))
        if int(line) > 500:
            print(line)
    except ValueError:
        print(line)
        continue
from __future__ import division
from scipy.stats import norm
from neuron import h, load_mechanisms
from numpy import trapz
import matplotlib.pyplot as plt

cvode = h.CVode()
cvode.active(1)
#cvode.maxstep(0.2)

h.load_file('stdlib.hoc')
import pylab
pylab.interactive(1)
import matplotlib.cm as cm

celsius = 36.0   # temperature
Epas = -70.6     # reversal potential for leakage current

## SELECT MORPHOLOGY
h("forall delete_section()")
h("sec_counted=0")
h.load_file(1, "Morf_default.hoc")  # Default, from Halnes2011
h.load_file(1, "fixnseg.hoc")       # Segmentize (technicality)

######################################################################

def test():
    rall = 113       # axial resistance
    cap = 1.1        # membrane capacitance
    Rm = 45000.0     # membrane resistance

    ## INSERT ION CHANNELS:
    for sec in h.allsec():
        sec.insert("pas")
        sec.e_pas = Epas
def time_series_intercomparison(conf_imd4, conf_gages, conf_trmm, conf_trmm_rt): """ """ tstart = dt.datetime.strptime("2001-04-01 00:00:00", "%Y-%m-%d %H:%M:%S") tend = dt.datetime.strptime("2010-12-30 00:00:00", "%Y-%m-%d %H:%M:%S") dtimes_imd4, _, imd4 = tl.echse.read_echse_data_file(conf_imd4["f_data"]) dtimes_trmm, _, trmm = tl.echse.read_echse_data_file(conf_trmm["f_data"]) dtimes_trmmrt, _, trmmrt = tl.echse.read_echse_data_file(conf_trmm_rt["f_data"]) dtimes_gage, _, gage = tl.echse.read_echse_data_file(conf_gages["f_data"]) ix_imd4 = (dtimes_imd4 >= tstart) & (dtimes_imd4 <tend) ix_trmm = (dtimes_trmm >= tstart) & (dtimes_trmm <tend) ix_trmmrt = (dtimes_trmmrt >= tstart) & (dtimes_trmmrt <tend) ix_gage = (dtimes_gage >= tstart) & (dtimes_gage <tend) lim = (0,300) txt = "Daily rainfall\nSubcatchment average\n%s to %s" % (tstart.strftime("%Y-%m-%d"),tend.strftime("%Y-%m-%d")) kwargs = {"edgecolor":"None", "alpha":0.05} plt.interactive(False) fig = plt.figure(figsize=(8,8)) ax = fig.add_subplot(221, aspect="equal") corr = "\nR=%.2f" % pearsonr(gage[ix_gage,:].ravel(), trmm[ix_trmm,:].ravel())[0] tl.vis.simple_scatter(gage[ix_gage,:], trmm[ix_trmm,:], "GAGE (mm)", "TRMM (mm)", lim, txt="GAGE vs. TRMM\n"+txt+corr, **kwargs ) ax = fig.add_subplot(222, aspect="equal") corr = "\nR=%.2f" % pearsonr(imd4[ix_imd4,:].ravel(), trmm[ix_trmm,:].ravel())[0] tl.vis.simple_scatter(imd4[ix_imd4,:], trmm[ix_trmm,:], "IMD4 (mm)", "TRMM (mm)", lim, txt="IMD4 vs. TRMM\n"+txt+corr, **kwargs) ax = fig.add_subplot(223, aspect="equal") corr = "\nR=%.2f" % pearsonr(gage[ix_gage,:].ravel(), imd4[ix_imd4,:].ravel())[0] tl.vis.simple_scatter(gage[ix_gage,:], imd4[ix_imd4,:], "GAGE (mm)", "IMD4 (mm)", lim, txt="GAGE vs. IMD4\n"+txt+corr, **kwargs) ax = fig.add_subplot(224, aspect="equal") corr = "\nR=%.2f" % pearsonr(trmm[ix_trmm,:].ravel(), trmmrt[ix_trmmrt,:].ravel())[0] tl.vis.simple_scatter(trmm[ix_trmm,:], trmmrt[ix_trmmrt,:], "TRMM (mm)", "TRMM RT (mm)", lim, txt="TRMM vs. 
TRMM RT\n"+txt+corr, **kwargs) plt.tight_layout() plt.savefig("P:/progress/mahanadi/_qpe/inter_product_scatter.png") plt.interactive(True) plt.figure(figsize=(12,12)) plt.subplot(311) plt.plot(dtimes_imd4[ix_imd4], medfilt( np.mean(imd4[ix_imd4,:],axis=1), 1 ), color="black", label="IMD4" ) plt.plot(dtimes_gage[ix_gage], medfilt( np.mean(gage[ix_gage,:],axis=1), 1 ), color="green", label="GAGE", alpha=0.7 ) plt.plot(dtimes_trmm[ix_trmm], medfilt( np.mean(trmm[ix_trmm,:],axis=1), 1 ), color="red", label="TRMM", alpha=0.5 ) plt.plot(dtimes_trmmrt[ix_trmmrt], medfilt( np.mean(trmmrt[ix_trmmrt,:],axis=1), 1 ), color="blue", label="TRMM RT", alpha=0.5 ) plt.xlabel("Year") plt.ylabel("Daily rainfall (mm)") plt.title("Unsmoothed") plt.legend() plt.subplot(312) plt.plot(dtimes_imd4[ix_imd4], medfilt( np.mean(imd4[ix_imd4,:],axis=1), 31 ), color="black", label="IMD4" ) plt.plot(dtimes_gage[ix_gage], medfilt( np.mean(gage[ix_gage,:],axis=1), 31 ), color="green", label="GAGE", alpha=0.7 ) plt.plot(dtimes_trmm[ix_trmm], medfilt( np.mean(trmm[ix_trmm,:],axis=1), 31 ), color="red", label="TRMM", alpha=0.5 ) plt.plot(dtimes_trmmrt[ix_trmmrt], medfilt( np.mean(trmmrt[ix_trmmrt,:],axis=1), 31 ), color="blue", label="TRMM RT", alpha=0.5 ) plt.xlabel("Year") plt.ylabel("Daily rainfall (mm)") plt.title("Smoothed with 31 day median filter") plt.subplot(313) plt.plot(dtimes_imd4[ix_imd4], medfilt( np.mean(imd4[ix_imd4,:],axis=1), 91 ), color="black", label="IMD4" ) plt.plot(dtimes_gage[ix_gage], medfilt( np.mean(gage[ix_gage,:],axis=1), 91 ), color="green", label="GAGE", alpha=0.7 ) plt.plot(dtimes_trmm[ix_trmm], medfilt( np.mean(trmm[ix_trmm,:],axis=1), 91 ), color="red", label="TRMM", alpha=0.5 ) plt.plot(dtimes_trmmrt[ix_trmmrt], medfilt( np.mean(trmmrt[ix_trmmrt,:],axis=1), 91 ), color="blue", label="TRMM RT", alpha=0.5 ) plt.xlabel("Year") plt.title("Smoothed with 91 day median filter") plt.tight_layout() plt.savefig("P:/progress/mahanadi/_qpe/inter_product_timeseries.png")
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
import numpy as np
from pylab import interactive
from matplotlib.patches import FancyArrowPatch
from scipy.optimize import fmin as simplex
from numpy import linalg as LA
import math

interactive(True)