def add_sub_plot(sub_num):

	plt.subplot(1,1,1)
	
	rbf = scipy.interpolate.Rbf(x, y, z, function='linear')
	zi = rbf(xi, yi)

	#io parameter interpolation
	rbf2 = scipy.interpolate.Rbf(xio, yio, zio, function='linear')
	zion = rbf2(xion, yion)

	#our contours: dashed red for the 0.2 dex levels, black for the 1 dex levels, cyan for the io-parameter levels
	contour = plt.contour(xi, yi, zi, levels, colors='r', linestyles='dashed')
	contour2 = plt.contour(xi, yi, zi, levels2, colors='k', linewidths=1.5)
	contour3 = plt.contour(xion, yion, zion, levelsio, colors='c', linewidths=3)

	plt.clabel(contour2, inline=1, fontsize=10, fmt='%1.1f')
	plt.clabel(contour3, inline=1, fontsize=10, fmt='%1.1f')

	#print the max values right on the plots
	#plt.scatter(max_values[0,2], max_values[0,3], c ='k',marker = '*', s = 1000)
	#plt.annotate(max_values[0,0], xy= (max_values[0,2], max_values[0,3]), xytext = (5, -30), textcoords = 'offset points', ha = 'left', va = 'bottom', fontsize=15)
	#axis limits
	yt_min = 8
	yt_max = 17
	xt_min = 0
	xt_max = 6
	plt.ylim(yt_min,yt_max)
	plt.xlim(xt_min,xt_max) 
	plt.yticks(arange(yt_min,yt_max+1,1),fontsize=16)
	plt.xticks(arange(xt_min,xt_max+1,1), fontsize = 16)
	plt.ylabel(r'Log($\phi_{\mathrm{H}}$)', fontsize=18)
	plt.xlabel(r'Log($n_{\mathrm{H}}$)', fontsize=18)
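The helper above leans on module-level globals (x, y, z, xi, yi, levels, and the io variants). A minimal self-contained sketch of the same Rbf-interpolate-then-contour pattern, with made-up sample data:

# Sketch of the Rbf -> contour pattern above (sample data is hypothetical).
import numpy as np
import scipy.interpolate
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
x, y = rng.uniform(0, 6, 50), rng.uniform(8, 17, 50)
z = np.sin(x) + np.cos(y / 4.0)

# regular grid to interpolate the scattered samples onto
xi, yi = np.meshgrid(np.linspace(0, 6, 100), np.linspace(8, 17, 100))
rbf = scipy.interpolate.Rbf(x, y, z, function='linear')
zi = rbf(xi, yi)

cs = plt.contour(xi, yi, zi, levels=10, colors='k')
plt.clabel(cs, inline=1, fontsize=10, fmt='%1.1f')
plt.show()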
def do_plot(mode, content, wide):
	global style
	style.apply(mode, content, wide)

	data = np.load("data/prr_AsAu_%s%s.npz"%(content, wide))

	AU, TAU = np.meshgrid(-data["Au_range_dB"], data["tau_range"])
	Zu = data["PRR_U"]
	Zs = data["PRR_S"]

	assert TAU.shape == AU.shape == Zu.shape, "The inputs TAU, AU, PRR_U must have the same shape for plotting!"

	plt.clf()

	if mode in ("sync",):
		# Plot the inverse power ratio; the sync signal is stronger for positive ratios.
		# The `hold` kwarg was removed from matplotlib (axes hold by default), so it is dropped here.
		CSf = plt.contourf(TAU, AU, Zs, levels=(0.0, 0.2, 0.4, 0.6, 0.8, 0.9, 1.0), colors=("1.0", "0.75", "0.5", "0.25", "0.15", "0.0"), origin="lower")
		CS2 = plt.contour(CSf, colors=("r",)*5 + ("w",), linewidths=(0.75,)*5 + (1.0,), origin="lower")
	else:
		CSf  = plt.contourf(TAU, AU, Zs, levels=(0.0, 0.2, 0.4, 0.6, 0.8, 0.9, 1.0), colors=("1.0", "0.75", "0.5", "0.25", "0.15", "0.0"), origin="lower")
		CS2f = plt.contour(CSf, levels=(0.0, 0.2, 0.4, 0.6, 0.8, 1.0), colors=4*("r",) + ("w",), linewidths=(0.75,)*4 + (1.0,), origin="lower")
		#CS2f = plt.contour(TAU, -AU, Zu, levels=(0.9, 1.0), colors=("0.0",), linewidths=(1.0,), origin="lower")
		if content in ("unif",):
			CSu  = plt.contourf(TAU, AU, Zu, levels=(0.2, 1.0), hatches=("////",), colors=("0.75",), origin="lower")
			CS2  = plt.contour(CSu, levels=(0.2,), colors=("r",), linewidths=(1.0,), origin="lower")

	style.annotate(mode, content, wide)

	plt.axis([data["tau_range"][0], data["tau_range"][-1], -data["Au_range_dB"][-1], -data["Au_range_dB"][0]])

	plt.ylabel(r"Signal power ratio ($\mathrm{SIR}$)", labelpad=2)
	plt.xlabel(r"Time offset $\tau$ ($/T$)", labelpad=2)

	plt.savefig("pdf/prrc2_%s_%s%s_z.pdf"%(mode, content, wide))
Example #3
	def __call__(self, bz, b2):

		phi = np.zeros_like(bz)
		dx = self.sim.xmx / self.sim.nx

		# midpoint-rule integration of bz along the last axis
		for i in range(1, self.sim.nx - 1):
			phi[:,:,i] = phi[:,:,i-2] - 2 * dx * bz[:,:,i-1]

		if self.focused:
			centerz = np.unravel_index(np.argmax(b2), b2.shape)[0]
			width = phi.shape[2]
			# integer division so the slice indices stay ints under Python 3
			if centerz < width // 2 + 1:
				phi = phi[0:width,:,:]
			else:
				phi = phi[centerz - width//2:centerz + width//2,:,:]

		phi = phi - phi.min()
		m = np.log(np.max(phi))
		# contour requires increasing levels, so sort the descending arange
		levels = np.sort(np.arange(m, m - 3, -3./30))
		plt.contour(np.log(phi[:,0,:]), levels)

		#plt.imshow(phi[:,0,:],origin=0)
		#plt.colorbar()

		plt.axis('tight')
def plot_likelihood_overspace(info, session, task_labels, colours, filepath=None):
    for task_label in task_labels:
        zones = getattr(session, task_label).zones
        likelihood = np.nanmean(np.array(getattr(session, task_label).likelihoods), axis=(0, 1))

        likelihood[np.isnan(likelihood)] = 0

        xx, yy = np.meshgrid(info.xedges, info.yedges)
        xcenters, ycenters = get_bin_centers(info)
        xxx, yyy = np.meshgrid(xcenters, ycenters)

        maze_highlight = "#fed976"
        plt.plot(session.position.x, session.position.y, ".", color=maze_highlight, ms=1, alpha=0.2)
        pp = plt.pcolormesh(xx, yy, likelihood, cmap='bone_r')
        for label in ["u", "shortcut", "novel"]:
            # outline each zone mask; `levels` must be a sequence, and 0.5 traces the mask boundary
            plt.contour(xxx, yyy, zones[label], levels=[0.5], linewidths=2, colors=colours[label])
        plt.colorbar(pp)
        plt.axis('off')

        plt.tight_layout()
        if filepath is not None:
            filename = info.session_id + "_" + task_label + "_likelihoods-overspace.png"
            plt.savefig(os.path.join(filepath, filename))
            plt.close()
        else:
            plt.show()
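The zone outlines above depend on get_bin_centers and the session objects. A minimal sketch of just the mask-outline trick, with a hypothetical circular zone:

# Outlining a boolean zone mask with a single mid-level contour (toy mask).
import numpy as np
import matplotlib.pyplot as plt

xc, yc = np.meshgrid(np.linspace(-1, 1, 100), np.linspace(-1, 1, 100))
mask = (xc**2 + yc**2) < 0.5  # hypothetical "zone"

# a 0.5 level traces the True/False boundary of the mask
plt.contour(xc, yc, mask.astype(float), levels=[0.5], linewidths=2, colors="r")
plt.show()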
def plot_haxby(activation, title):
    z = 25

    fig = plt.figure(figsize=(4, 5.4))
    fig.subplots_adjust(bottom=0., top=1., left=0., right=1.)
    plt.axis('off')
    # pl.title('SVM vectors')
    plt.imshow(mean_img[:, 4:58, z].T, cmap=plt.cm.gray,
               interpolation='nearest', origin='lower')
    plt.imshow(activation[:, 4:58, z].T, cmap=plt.cm.hot,
               interpolation='nearest', origin='lower')

    mask_house = nib.load(h.mask_house[0]).get_fdata()  # get_data() was removed from nibabel
    mask_face = nib.load(h.mask_face[0]).get_fdata()

    # `contours` and `interpolation` are not contour kwargs, so they were dropped;
    # np.bool was removed from numpy in favour of the builtin bool
    plt.contour(mask_house[:, 4:58, z].astype(bool).T,
                antialiased=False, linewidths=4., levels=[0],
                colors=['blue'], origin='lower')

    plt.contour(mask_face[:, 4:58, z].astype(bool).T,
                antialiased=False, linewidths=4., levels=[0],
                colors=['limegreen'], origin='lower')

    p_h = Rectangle((0, 0), 1, 1, fc="blue")
    p_f = Rectangle((0, 0), 1, 1, fc="limegreen")
    plt.legend([p_h, p_f], ["house", "face"])
    plt.title(title, x=.05, ha='left', y=.90, color='w', size=28)
Example #6
def plot_fgmax_grid():

    fg = fgmax_tools.FGmaxGrid()
    fg.read_input_data('fgmax_grid1.txt')
    fg.read_output()

    #clines_zeta = [0.01] + list(numpy.linspace(0.05,0.3,6)) + [0.5,1.0,10.0]
    clines_zeta = [0.001] + list(numpy.linspace(0.05,0.25,10))
    colors = geoplot.discrete_cmap_1(clines_zeta)
    plt.figure(1)
    plt.clf()
    zeta = numpy.where(fg.B>0, fg.h, fg.h+fg.B)   # surface elevation in ocean
    plt.contourf(fg.X,fg.Y,zeta,clines_zeta,colors=colors)
    plt.colorbar()
    plt.contour(fg.X,fg.Y,fg.B,[0.],colors='k')  # coastline

    # plot arrival time contours and label:
    arrival_t = fg.arrival_time/3600.  # arrival time in hours
    #clines_t = numpy.linspace(0,8,17)  # hours
    clines_t = numpy.linspace(0,2,5)  # hours
    #clines_t_label = clines_t[::2]  # which ones to label 
    clines_t_label = clines_t[::1]  # which ones to label 
    clines_t_colors = ([.5,.5,.5],)
    con_t = plt.contour(fg.X,fg.Y,arrival_t, clines_t,colors=clines_t_colors) 
    plt.clabel(con_t, clines_t_label)

    # fix axes:
    plt.ticklabel_format(style='plain', useOffset=False)  # the kwarg is `style`, not `format`
    plt.xticks(rotation=20)
    plt.gca().set_aspect(1./numpy.cos(fg.Y.mean()*numpy.pi/180.))
    plt.title("Maximum amplitude / arrival times (hrs)")
Example #7
    def region_images(self, save_name=None, percentile=80.):
        '''
        Creates saved PDF plots of several quantities/images.
        '''

        if not self.verbose:
            threshold = scoreatpercentile(self.image[~np.isnan(self.image)], percentile)
            p.imshow(self.image, vmax=threshold, origin="lower", interpolation="nearest")
            p.contour(self.mask)
            p.title("".join([save_name, " Contours at ", str(round(threshold))]))
            p.savefig("".join([save_name, "_filaments.pdf"]))
            p.close()

            ## Skeletons
            masked_image = self.image * self.mask
            skel_points = np.where(self.skeleton == 1)
            for i in range(len(skel_points[0])):
                masked_image[skel_points[0][i], skel_points[1][i]] = np.nan
            p.imshow(masked_image, vmax=threshold, interpolation=None, origin="lower")
            p.savefig("".join([save_name, "_skeletons.pdf"]))
            p.close()

        return self
def VisualizeData(X, mu, sigma):
    """
    Plot the data in X along with the fit.
    Note that X is a two-column matrix.
    :param X: data matrix (one example per row, two columns)
    :param mu: mean vector of the fit
    :param sigma: variance vector of the fit
    :return: None
    """
    np.seterr(over='ignore')
    x1, x2 = np.meshgrid(np.arange(0, 35, 0.5), np.arange(0, 35, 0.5))
    z1 = np.asarray(x1).reshape(-1)

    mat = np.zeros([len(z1), 2])
    mat[:, 0] = np.asarray(x1).reshape(-1)
    mat[:, 1] = np.asarray(x2).reshape(-1)

    Z = MultivariateGaussian(mat, mu, sigma)
    Z = np.reshape(Z, np.shape(x1))

    levels = [10 ** e for e in np.arange(-20, 0, 3)]  # log-spaced contour levels (renamed to avoid shadowing x)

    plt.figure(1)

    plt.scatter(X[:, 0], X[:, 1], c=None, s=25, alpha=None, marker="+")

    plt.contour(x1, x2, Z, levels)
    plt.show()
def steepVsConj():
    Q = np.array([[1.,0], [0,10.]])
    b = np.array([0.,0.])
    def f(pt):
        return .5*(pt*Q.dot(pt)).sum() - (b*pt).sum()
    #pts = [np.array([1.5,1.5]), np.array([1.,1.]), np.array([.5,.5])]
    x0 = np.array([5.,.5])
    pts = steepestDescent(Q, b, x0, niter=20)
    pts2 = conjugateGradient(Q, b, x0)
    dom = np.linspace(-5, 5, 100)
    X, Y = np.meshgrid(dom, dom)
    Z = np.empty(X.shape)
    for i in range(X.shape[0]):
        for j in range(X.shape[1]):
            pt = np.array([X[i,j], Y[i,j]])
            Z[i,j] = f(pt)
    vals = np.empty(len(pts))
    for i in range(len(pts)):
        vals[i] = f(pts[i])
    plt.contour(X, Y, Z, np.sort(vals[:5]), colors='gray')  # contour levels must be increasing
    plt.plot(np.array(pts)[:,0],np.array(pts)[:,1], '*-')
    plt.plot(np.array(pts2)[:,0],np.array(pts2)[:,1], '*-')
    plt.savefig('steepVsConj.pdf')
    plt.clf()
def plot_mv_normal(mu, covarianceMatrix, xmin=-5, xmax=5, ymin=-5, ymax=5, resolution=50):
    X, Y = numpy.meshgrid(numpy.linspace(xmin, xmax, resolution),
                          numpy.linspace(ymin, ymax, resolution))

    # 2d array to store mv_normal pdf values
    pdfv = numpy.zeros(X.shape)

    # calculate pdf values at each X,Y location
    for i in range(X.shape[0]):
        for j in range(X.shape[1]):
            pdfv[i][j] = mv_normal(numpy.array([X[i][j], Y[i][j]]), mu, covarianceMatrix)

    # Contour plot
    plt.figure()
    plt.contour(X, Y, pdfv)
    plt.xlabel(r'$x$')
    plt.ylabel(r'$y$')

    # Surface plot
    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')  # fig.gca(projection=...) was removed in Matplotlib 3.6
    surf = ax.plot_surface(X, Y, pdfv, rstride=1, cstride=1,
                           cmap=matplotlib.cm.jet,
                           linewidth=0, antialiased=False)
    plt.xlabel('$x$')
    plt.ylabel('$y$')
    fig.colorbar(surf)

    plt.show()
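For reference, the double loop above can be avoided entirely; a self-contained variant using scipy.stats.multivariate_normal (not the user-defined mv_normal helper) looks like this:

# Vectorised multivariate-normal contour, with hypothetical mu and cov.
import numpy
import matplotlib.pyplot as plt
from scipy.stats import multivariate_normal

mu = numpy.array([0.0, 0.0])
cov = numpy.array([[1.0, 0.5], [0.5, 2.0]])

X, Y = numpy.meshgrid(numpy.linspace(-5, 5, 50), numpy.linspace(-5, 5, 50))
pos = numpy.dstack((X, Y))                    # shape (50, 50, 2)
pdfv = multivariate_normal(mu, cov).pdf(pos)  # evaluated on the whole grid at once

plt.contour(X, Y, pdfv)
plt.show()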
Example #11
def VisualizeFit(Xval, pval, epsilon, mu, sigma):
    """
    Visualize the fitted data.
    :param Xval: the validation data set (only the first two columns are used)
    :param pval: a vector of probabilities for the examples in Xval
    :param epsilon: threshold below which an example is flagged as an anomaly
    :param mu: estimate of the mean, using the training data
    :param sigma: estimate of the variance, using the training data
    :return: None
    """
    np.seterr(over='ignore')
    x1, x2 = np.meshgrid(np.arange(0, 35, 0.5), np.arange(0, 35, 0.5))
    z1 = np.asarray(x1).reshape(-1)

    mat = np.zeros([len(z1), 2])
    mat[:, 0] = np.asarray(x1).reshape(-1)
    mat[:, 1] = np.asarray(x2).reshape(-1)

    Z = MultivariateGaussian(mat, mu, sigma)
    Z = np.reshape(Z, np.shape(x1))

    levels = [10 ** e for e in np.arange(-20, 0, 3)]  # log-spaced contour levels (renamed to avoid shadowing x)

    plt.figure(1)

    plt.scatter(Xval[:, 0], Xval[:, 1], c=None, s=25, alpha=None, marker="+")

    points = np.where(pval < epsilon)
    plt.scatter(Xval[:, 0][points], Xval[:, 1][points], s=50, marker='+', color='red')
    plt.contour(x1, x2, Z, levels)

    plt.show()
Example #12
def multiple_optima(gene_number=937, resolution=80, model_restarts=10, seed=10000, max_iters=300, optimize=True, plot=True):
    """
    Show an example of a multimodal error surface for Gaussian process
    regression. Gene 939 has bimodal behaviour where the noisy mode is
    higher.
    """

    # Contour over a range of length scales and signal/noise ratios.
    length_scales = np.linspace(0.1, 60., resolution)
    log_SNRs = np.linspace(-3., 4., resolution)

    try:
        import pods
    except ImportError:
        print('pods unavailable, see https://github.com/sods/ods for example datasets')
        return
    data = pods.datasets.della_gatta_TRP63_gene_expression(data_set='della_gatta',gene_number=gene_number)
    # data['Y'] = data['Y'][0::2, :]
    # data['X'] = data['X'][0::2, :]

    data['Y'] = data['Y'] - np.mean(data['Y'])

    lls = GPy.examples.regression._contour_data(data, length_scales, log_SNRs, GPy.kern.RBF)
    if plot:
        pb.contour(length_scales, log_SNRs, np.exp(lls), 20, cmap=pb.cm.jet)
        ax = pb.gca()
        pb.xlabel('length scale')
        pb.ylabel('log_10 SNR')

        xlim = ax.get_xlim()
        ylim = ax.get_ylim()

    # Now run a few optimizations
    models = []
    optim_point_x = np.empty(2)
    optim_point_y = np.empty(2)
    np.random.seed(seed=seed)
    for i in range(0, model_restarts):
        # kern = GPy.kern.RBF(1, variance=np.random.exponential(1.), lengthscale=np.random.exponential(50.))
        kern = GPy.kern.RBF(1, variance=np.random.uniform(1e-3, 1), lengthscale=np.random.uniform(5, 50))

        m = GPy.models.GPRegression(data['X'], data['Y'], kernel=kern)
        m.likelihood.variance = np.random.uniform(1e-3, 1)
        optim_point_x[0] = m.rbf.lengthscale
        optim_point_y[0] = np.log10(m.rbf.variance) - np.log10(m.likelihood.variance)

        # optimize
        if optimize:
            m.optimize('scg', xtol=1e-6, ftol=1e-6, max_iters=max_iters)

        optim_point_x[1] = m.rbf.lengthscale
        optim_point_y[1] = np.log10(m.rbf.variance) - np.log10(m.likelihood.variance)

        if plot:
            pb.arrow(optim_point_x[0], optim_point_y[0], optim_point_x[1] - optim_point_x[0], optim_point_y[1] - optim_point_y[0], label=str(i), head_length=1, head_width=0.5, fc='k', ec='k')
        models.append(m)

    if plot:
        ax.set_xlim(xlim)
        ax.set_ylim(ylim)
    return m # (models, lls)
Example #13
def PlotDecisionBoundary(predictor, train_data, test_data):
  x1 = np.arange(0, 1, 0.01)
  x2 = np.arange(0, 1, 0.01)
  x1,x2 = np.meshgrid(x1, x2)
  y = np.zeros_like(x1)
  m,n = y.shape
  for i in range(m):
    for j in range(n):
      y[i,j] = predictor.Classify([x1[i,j], x2[i,j]])
  # now do plots
  train_pos = train_data[:,2] == 1
  train_neg = train_data[:,2] == -1
  test_pos = test_data[:,2] == 1
  test_neg = test_data[:,2] == -1
  plt.figure(figsize=(12,5))
  plt.subplot(121)
  plt.plot(train_data[train_pos,0], train_data[train_pos,1], 'r+')
  plt.plot(train_data[train_neg,0], train_data[train_neg,1], 'b+')
  plt.contour(x1, x2, y, levels=[0])
  plt.xlim(0, 1)
  plt.xlabel(r'$x_1$', fontsize=20)
  plt.ylim(0, 1)
  plt.ylabel(r'$x_2$', fontsize=20)
  plt.title('Decision boundary superimposed on training data', fontsize=15)
  plt.subplot(122)
  plt.plot(test_data[test_pos,0], test_data[test_pos,1], 'r+')
  plt.plot(test_data[test_neg,0], test_data[test_neg,1], 'b+')
  plt.contour(x1, x2, y, levels=[0])
  plt.xlim(0, 1)
  plt.xlabel(r'$x_1$', fontsize=20)
  plt.ylim(0, 1)
  plt.ylabel(r'$x_2$', fontsize=20)
  plt.title('Decision boundary superimposed on test data', fontsize=15)
  plt.tight_layout()
  plt.show()
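A minimal sketch of the same boundary-as-zero-level idea with a stand-in decision function (the real code expects a trained predictor object):

# Decision boundary as the zero level of a classifier score (toy classifier).
import numpy as np
import matplotlib.pyplot as plt

def classify(x1, x2):
    # hypothetical stand-in for predictor.Classify: sign of a quadratic score
    return np.sign(x1**2 + x2 - 0.5)

g1, g2 = np.meshgrid(np.arange(0, 1, 0.01), np.arange(0, 1, 0.01))
scores = classify(g1, g2)

plt.contour(g1, g2, scores, levels=[0])  # the zero level is the boundary
plt.show()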
Example #14
def MonotoneRegion():
    matplotlib.rcParams['xtick.direction'] = 'out'
    matplotlib.rcParams['ytick.direction'] = 'out'

    delta = 0.025
    w2 = np.arange(-5.0, 5.0, delta)
    a = np.arange(0.0, 5.0, delta)
    W2, A = np.meshgrid(w2, a)
    LAM_cc = -1. + 5.*(A**2.) - np.sqrt(1. + A**4. + 2.*(A**2.)*(-1. + 8.*(W2**2.)))  # traces the same boundary as LAM_eg
    # solve for where the square root above is less than b^2 and you recover LAM_eg below
    LAM_eg = -1. + 3.*(A**2.) - 2.*(W2**2.)
    # You can force all the contours to be the same color.
    plt.figure()
    CS = plt.contour(W2, A, LAM_cc, 10,
                     colors='k',  # negative contours will be dashed by default
                     )
    plt.clabel(CS, fontsize=9, inline=1)
    plt.title('Monotone Region: Crossing the Curl')
    plt.savefig('monregion_cc.png')
    plt.close()

    plt.figure()
    CS = plt.contour(W2, A, LAM_eg, 10,
                     colors='k',  # negative contours will be dashed by default
                     )
    plt.clabel(CS, fontsize=9, inline=1)
    plt.title('Monotone Region: Extragradient')
    plt.savefig('monregion_eg.png')
    plt.close()
Example #15
def mapplot(xpos, ypos, z, nsteps=50):
  #ncontours=15
  xmin, xmax = xpos.min(), xpos.max()
  ymin, ymax = ypos.min(), ypos.max()
  xi = numpy.linspace(xmin, xmax, nsteps)
  yi = numpy.linspace(ymin, ymax, nsteps)
  nplots = z.shape[0]
  nrows, ncols = nrows_ncols_from_nplots(nplots)
  # mlab.griddata was removed from matplotlib; scipy.interpolate.griddata is the
  # replacement. (The old 'nn' interpolation needed mpl_toolkits.natgrid, which
  # was py2-only, so we interpolate linearly here.)
  from scipy.interpolate import griddata
  for thisplot in range(0, nplots):
   # cf. http://www.scipy.org/Cookbook/Matplotlib/Gridding_irregularly_spaced_data
   zi = griddata((xpos, ypos), z[thisplot], (xi[None, :], yi[:, None]), method='linear')
   # Don't mess with arranging plots on a page if we only have a single plot...
   if nplots > 1:
    plt.subplot(nrows, ncols, thisplot + 1)
   # pcolormesh is described to be much faster than pcolor; shading='auto' because
   # 'flat' now requires the grid to be one larger than the data in each direction.
   # Note that the default for edgecolors appears to be 'None', resulting in transparent lines between faces...
   gplot = plt.pcolormesh(xi, yi, zi, norm=plt.Normalize(vmin=vmin, vmax=vmax), shading='auto', edgecolors='face', antialiaseds=False)
   #gplot=plt.contourf(g,ncontours)
   #plt.scatter(xpos,ypos,marker='o',c='black',s=5) # Draw sample points
   if isolines:
    plt.contour(xi, yi, zi, isolines, colors='black', linestyles='solid')
   gplot.axes.set_axis_off()
   gplot.axes.set_xlim(xmin, xmax)
   gplot.axes.set_ylim(ymin, ymax)
 def plot(self, trajectories=[], bold=[]):
     fig, ax = plt.subplots()
     if self._states is not None:
         plt.imshow(self._states, extent=self.extent, cmap="Blues",
                    interpolation='nearest', vmin=0.0, vmax=2.0,
                    aspect='auto')
     if self._pes is not None:
         plt.contour(self.X, self.Y, self._pes,
                     levels=self.contour_range, colors='k')
     if self._interfaces is not None:
         for iface in self._interfaces:
             # `interpolation` is not a contour kwarg, so it was dropped
             plt.contour(self.X, self.Y, iface,
                         colors='r', levels=[0.5])
     if self._initcond is not None:
         ax.plot(self._initcond.coordinates[0,0],
                 self._initcond.coordinates[0,1],
                 'ro', zorder=3)
     for i, traj in enumerate(bold):
         plt.plot(traj.xyz[:,0,0], traj.xyz[:,0,1],
                  self.repcolordict[i], linewidth=2,
                  zorder=1)
     for i, traj in enumerate(trajectories):
         plt.plot(traj.xyz[:,0,0], traj.xyz[:,0,1],
                  self.repcolordict[i % 5],
                  zorder=2)
     return fig
Example #17
def contourRegrid(l,S,limits,time,before,lvls=[],vlims=[]):
    plt.close()
    name = ''  # figure label; the original used `str`, which shadows the builtin
    def makeplot():
#        plt.clabel(ph, inline=1, fontsize=10)
#        plt.axis('equal')
        plt.axis(limits)
        if before:
            plt.title(name + ' before regridding, time = %.3f' % time)
            fname = os.path.expanduser('~/VEsims/') + name + 'Regridding%02dBefore.pdf' % int(round(time))
        else:
            plt.title(name + ' after regridding, time = %.3f' % time)
            fname = os.path.expanduser('~/VEsims/') + name + 'Regridding%02dRegrid.pdf' % int(round(time))
        plt.savefig(fname)
    ph = plt.contour(l[:,:,0], l[:,:,1], S[:,:,0,0], levels=lvls[0])
#    ph = plt.pcolor(l[:,:,0],l[:,:,1],S[:,:,0,0],vmin=vlims[0],vmax=vlims[1],cmap=cm.cool)
    plt.colorbar(ph)
    name = 'S11'
    makeplot()
    plt.clf()
    ph = plt.contour(l[:,:,0], l[:,:,1], S[:,:,1,1], levels=lvls[1])
#    ph = plt.pcolor(l[:,:,0],l[:,:,1],S[:,:,1,1],vmin=vlims[2],vmax=vlims[3],cmap=cm.cool)
    plt.colorbar(ph)
    name = 'S22'
    makeplot()
    plt.close()
Example #18
def plot_dist(z, dz, om, dom, dist, dh, name, mathname, filename=None):
    """
    Make a 2-D plot of a distance versus redshift (x) and matter density (y).
    """
    # Grid of redshift and matter density values.
    x, y = numpy.meshgrid(z, om)
    pylab.figure()
    pylab.imshow(dist / dh,
                 extent=(z.min() - dz / 2.,
                         z.max() + dz / 2.,
                         om.max() + dom / 2.,
                         om.min() - dom / 2.),
                 interpolation='nearest',
                 aspect=z.max() / om.max(),
                 cmap=cm.Spectral
    )
    cb = pylab.colorbar()
    cb.ax.set_ylabel(r'$' + mathname + '/D_H$')

    pylab.contour(x, y, dist / dh, 10, colors='k')
    pylab.xlim(z.min(), z.max())
    pylab.ylim(om.min(), om.max())
    pylab.xlabel("Redshift z")
    pylab.ylabel(r"$\Omega_M = 1 - \Omega_\lambda$")
    pylab.title(name)
    if filename is not None:
        # rsplit keeps filenames containing extra dots intact
        prefix, extension = filename.rsplit('.', 1)
        pylab.savefig(prefix + '_' + mathname + '.' + extension,
                      bbox_inches="tight")
Example #19
def animate(i):
    new_x = (random() - .5) * 4
    new_y = (random() - .5) * 4

    point_class = get_point_class(new_x, new_y)
    if point_class == -1:
        add_new_point(new_x, new_y, inside_x_points, inside_y_points, inside, -1)
    elif point_class == 1:
        add_new_point(new_x, new_y, outside_x_points, outside_y_points, outside, 1)
    else:
        return inside, outside
    if len(inside_x_points) == 0 or len(outside_x_points) == 0:
        return inside, outside

    perceptron_output['data'] = process_perceptron_output(perceptron_output['data'])

    new_a = array(perceptron_output['data'][0:3])
    new_b = array(perceptron_output['data'][3:6])
    new_c = perceptron_output['data'][6]
    plt.gca().clear()
    plt.contour(x, y[:,None].ravel(), get_eq(x, y, a, b, c), [0], colors='r')
    plt.contour(x, y[:,None].ravel(), get_eq(x, y, new_a, new_b, new_c), [0], colors='b')
    ax.plot(inside_x_points, inside_y_points, 'ro')
    ax.plot(outside_x_points, outside_y_points, 'bo')
    plt.draw()
    return inside, outside
def three_d_contour(file,show):

    # raw_input was renamed to input in Python 3
    xloc = input("xlocation(two_d_initial_x_value.txt):") or "two_d_initial_x_value.txt"
    yloc = input("ylocation(two_d_initial_y_value.txt):") or "two_d_initial_y_value.txt"
    uloc = input("ulocation(two_d_cavity_u_results.txt):") or "two_d_cavity_u_results.txt"
    vloc = input("vlocation(two_d_cavity_v_results.txt):") or "two_d_cavity_v_results.txt"
    ploc = input("plocation(two_d_cavity_p_results.txt):") or "two_d_cavity_p_results.txt"
    u=np.loadtxt(uloc)
    v=np.loadtxt(vloc)
    p=np.loadtxt(ploc)
    X=np.loadtxt(xloc)
    Y=np.loadtxt(yloc)
    X,Y=np.meshgrid(X,Y)
#    print X,Y
    fig = plt.figure()
    plt.contourf(X,Y,p,alpha=0.5)    ###plotting the pressure field as a contour
    plt.colorbar()
    plt.contour(X,Y,p)               ###plotting the pressure field outlines
    plt.quiver(X[::3,::3],Y[::3,::3],u[::3,::3],v[::3,::3]) ##plotting velocity
    plt.xlabel('X')
    plt.ylabel('Y')
    plt.title(file)
    sep = '.'
    filename = file.split(sep, 1)[0]
    plt.savefig(filename+'.png')   # save figure as .png file
    if show:
        # print('test')
        plt.show()
    plt.close(fig)
Example #21
    def test(self):
        xlim=[-2.0,2.0]
        ylim=[-2.0,2.0]
        
        resol = 0.025
        sample_x = np.arange(-2.0,2.0,resol)
        sample_y = np.arange(-2.0,2.0,resol)
        sample_X, sample_Y = np.meshgrid(sample_x, sample_y)
        sample_XX = np.hstack([sample_X.reshape(sample_X.size,1),sample_Y.reshape(sample_Y.size,1)])

        # older sklearn GMMs returned per-sample log-likelihoods from score();
        # modern GaussianMixture calls this score_samples()
        sample_Z1 = np.exp(self.gmm_0.score_samples(sample_XX))
        sample_Z2 = np.exp(self.gmm_1.score_samples(sample_XX))

        plt.figure()
        ax1 = plt.subplot(121, aspect='equal')    
        CS = plt.contour(sample_X, sample_Y, sample_Z1.reshape(sample_X.shape))
        plt.clabel(CS, inline=1, fontsize=10)

        ax2 = plt.subplot(122, aspect='equal')    
        CS = plt.contour(sample_X, sample_Y, sample_Z2.reshape(sample_X.shape))
        plt.clabel(CS, inline=1, fontsize=10)

        ax1.set_xlim(xlim)
        ax1.set_ylim(ylim)

        ax2.set_xlim(xlim)
        ax2.set_ylim(ylim)

        plt.show()

        #print gmm_1.get_params(deep=True)
        print(self.gmm_0.weights_)
        print(self.gmm_0.means_)
        print(self.gmm_0.covariances_)  # named covars_ in very old sklearn
Example #22
def test_contour_heatmap():
    from scipy.interpolate import griddata
    from matplotlib import pyplot as plt

    mesh3D = mesh(200)
    mesh2D = proj_to_2D(mesh3D)

    data = np.zeros((3,3))
    data[0,1] += 2

    vals = np.exp(log_dirichlet_density(mesh3D,2.,data=data.sum(0)))
    temp = log_censored_dirichlet_density(mesh3D,2.,data=data)
    censored_vals = np.exp(temp - temp.max())

    xi = np.linspace(-1,1,1000)
    yi = np.linspace(-0.5,1,1000)

    plt.figure()
    plt.contour(griddata((mesh2D[:,0],mesh2D[:,1]),vals,(xi[None,:],yi[:,None]), method='cubic'))
    plt.axis('off')
    plt.title('uncensored likelihood')

    plt.figure()
    plt.contour(griddata((mesh2D[:,0],mesh2D[:,1]),censored_vals,(xi[None,:],yi[:,None]),method='cubic'))
    plt.axis('off')
    plt.title('censored likelihood')
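The scattered-samples-to-grid-to-contour pattern above, reduced to a self-contained sketch with toy samples:

# griddata -> contour on scattered data (toy samples).
import numpy as np
from scipy.interpolate import griddata
from matplotlib import pyplot as plt

rng = np.random.default_rng(1)
pts = rng.uniform(-1, 1, size=(500, 2))
vals = np.exp(-(pts**2).sum(axis=1))  # hypothetical values at scattered points

xi = np.linspace(-1, 1, 200)
yi = np.linspace(-1, 1, 200)
zi = griddata((pts[:, 0], pts[:, 1]), vals, (xi[None, :], yi[:, None]), method='cubic')

plt.contour(xi, yi, zi)  # NaNs outside the convex hull are simply not drawn
plt.axis('off')
plt.show()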
def visualCost(X,y,theta):
    theta0_vals = np.linspace(-10, 10, 500)
    theta1_vals = np.linspace(-1, 4, 500)
    J_vals = np.zeros((len(theta0_vals),len(theta1_vals)))
    for i, elem0 in enumerate(theta0_vals):
        for j, elem1 in enumerate(theta1_vals):
            theta_vals = np.array([[elem0], [elem1]])
            J_vals[i, j] = computeCost(X, y, theta_vals)       
    theta0_vals, theta1_vals = np.meshgrid(theta0_vals,theta1_vals)

    J_vals = J_vals.T
    #surface plot
    fig4 = plt.figure()
    ax = fig4.add_subplot(projection='3d')  # fig.gca(projection=...) was removed in Matplotlib 3.6
    ax.plot_surface(theta0_vals, theta1_vals, J_vals, cmap=cm.jet)
    ax.view_init(azim = 180+40,elev = 25)
    plt.xlabel("theta_0")
    plt.ylabel("theta_1")
    plt.savefig("cost3D.pdf")
    
    #contour plot
    plt.figure(5)
    plt.contour(theta0_vals, theta1_vals, J_vals, levels=np.logspace(-2, 3, 20))
    plt.scatter(theta[0,0], theta[1,0],  c='r', marker='x', s=20, linewidths=1)  # mark converged minimum
    plt.xlim([-10, 10])
    plt.ylim([-1, 4])
    plt.xlabel("theta_0")
    plt.ylabel("theta_1")
    plt.savefig("costContour.pdf")
Example #24
def visualize_fit(X, mu_vec, var_vec):
    """ Plots dataset and estimated Gaussian distribution.

    Args:
      X: Matrix of features.
      mu_vec: Vector of mean values of features.
      var_vec: Vector of variances of features.

    Returns:
      None.
    """
    pyplot.scatter(X[:, 0], X[:, 1], s=80, marker='x', color='b')
    pyplot.ylabel('Throughput (mb/s)', fontsize=18)
    pyplot.xlabel('Latency (ms)', fontsize=18)
    u_vals = numpy.linspace(0, 35, num=71)
    v_vals = numpy.linspace(0, 35, num=71)
    z_vals = numpy.zeros((u_vals.shape[0], v_vals.shape[0]))
    for u_index in range(0, u_vals.shape[0]):
        for v_index in range(0, v_vals.shape[0]):
            z_vals[u_index, v_index] = (
                multivariate_gaussian(numpy.c_[u_vals[u_index],
                                               v_vals[v_index]], mu_vec,
                                      var_vec))
    z_vals_trans = numpy.transpose(z_vals)
    u_vals_x, v_vals_y = numpy.meshgrid(u_vals, v_vals)
    z_vals_reshape = z_vals_trans.reshape(u_vals_x.shape)
    exp_seq = numpy.linspace(-20, 1, num=8)
    pow_exp_seq = numpy.power(10, exp_seq)
    pyplot.contour(u_vals, v_vals, z_vals_reshape, pow_exp_seq)
    return None
Example #25
def plot2(X,Y,beta):
	#function to plot a non-linear decision boundary
	for i in range(1,X.shape[0]):
		if Y[i] == 1:
			plt.plot(X[i][1],X[i][2],'rs')
		else:
			plt.plot(X[i][1],X[i][2],'bs')
	#Here is the grid range:
	#50 values between -1 and 1.5
	u = np.linspace(-1, 1.5, 50)
	v = np.linspace(-1, 1.5, 50)

	z = np.zeros((len(u), len(v)))
	#Evaluate z = theta*x over the grid;
	#the decision boundary is given by theta*x = 0
	for i in range(0,len(u)):
		for j in range(0,len(v)):
			temp = np.matrix([u[i],v[j]])
			z[i,j] = mapFeatures(temp,6)*beta

	z = z.T # important to transpose z before calling contour

	#Plot the z = 0 level (a single level, not MATLAB's [0, 0] idiom)
	plt.contour(u, v, z, levels=[0])

	plt.show()
Example #26
def gaussFit(data, inix=0, iniy=0, amp=1.):
   
    xv = np.linspace(np.max(data.xaxis), np.min(data.xaxis), data.xaxis.shape[0])
    yv = np.linspace(np.min(data.yaxis), np.max(data.yaxis), data.yaxis.shape[0])

    x, y = np.meshgrid(xv, yv)
    
    p_init = models.Gaussian2D(amplitude=amp, x_mean=inix, y_mean=iniy, x_stddev=.01 , y_stddev=.1,theta=0.)
    fit_p = fitting.LevMarLSQFitter()
    
#    for i in range(256):
#        for j in range(256):
#            if (data.yaxis[j]>-51. ): #12*data.xaxis[i]-373.8):
#                data.image[j,i]=0.
    
    
    p = fit_p(p_init, x, y, data.image)
    print(p)
    
    th=p.theta.value
    a=(np.cos(th)**2)/(2*p.x_stddev.value**2) + (np.sin(th)**2)/(2*p.y_stddev.value**2)
    b=-(np.sin(2*th))/(4*p.x_stddev.value**2) + (np.sin(2*th)) /(4*p.y_stddev.value**2)
    c=(np.sin(th)**2)/(2*p.x_stddev.value**2) + (np.cos(th)**2)/(2*p.y_stddev.value**2)

    z=p.amplitude.value*np.exp(-(a*(x-p.x_mean.value)**2 - 2*b*(x-p.x_mean.value)*(y-p.y_mean.value) + c*(y-p.y_mean.value)**2  ))
    plt.contour(x,y,z, linewidths=2)
def show_segmented_result(original_img, closed_healthy, closed_cancer):
    # We load with opencv and plot with pyplot (BGR->RGB)
    original_img = cv2.cvtColor(original_img, cv2.COLOR_BGR2RGB)
    plt.imshow(original_img)
    plt.contour(closed_healthy, [0.5], linewidths=0.7, colors='g')
    plt.contour(closed_cancer, [0.5], linewidths=0.7, colors='r')
    plt.savefig(sys.argv[1]+"_out.png", dpi=700)
def main(argv):
    # Load Station Data
    conn = sqlite3.connect(_BABS_DATABASE_PATH)
    station_data = pd.read_sql(_BABS_QUERY, conn)
    station_data['Usage'] = 0.5*(station_data['Start Count'] + station_data['End Count'])

    print("\n\nLoading model to file ... ")
    with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.path.pardir, 'models', 'babs_usage_model.dill'), 'r') as f:
            babs_usage_model = dill.load(f)

    # Interpolate all features from feature models
    lats, longs = np.meshgrid(np.linspace(37.7,37.82,30), np.linspace(-122.55,-122.35,30))
    transformed_features = babs_usage_model.named_steps['features_from_lat_long'].transform(pd.DataFrame({'Latitude': lats.reshape(1,-1).squeeze(), 'Longitude': longs.reshape(1,-1).squeeze()}))

    prediction_features = pd.DataFrame({'Latitude': lats.reshape(1,-1).squeeze(), 'Longitude': longs.reshape(1,-1).squeeze()})
    usage_predictions = babs_usage_model.predict(prediction_features)
    usage_predictions[np.array(transformed_features['Elevation']<0)] = np.nan
    usage_predictions = np.lib.scimath.logn(100, usage_predictions - np.nanmin(usage_predictions) + 1)
    usage_predictions[np.where(np.isnan(usage_predictions))] = 0

    plt.contourf(longs, lats, usage_predictions.reshape(30,-1),
                 norm=colors.Normalize(np.mean(usage_predictions)-(1*np.std(usage_predictions)), np.mean(usage_predictions)+(1*np.std(usage_predictions)), clip=True),
                 levels=np.linspace(0.01,max(usage_predictions),300))
    plt.contour(longs, lats, (transformed_features['Elevation']).reshape(30,-1), linewidths=0.2, colors='white')
    plt.scatter(station_data[station_data['Landmark']=='San Francisco']['Longitude'], station_data[station_data['Landmark']=='San Francisco']['Latitude'], s=2, )

    # plt.scatter(longs,
    #             lats,
    #             #s=(usage_predictions<0)*10,
    #             s=(transformed_features['Elevation']>0)*10,
    #             cmap=matplotlib.cm.Reds)
    plt.show()
Example #29
def plot_decision_kernel_boundary(X,y,scaler, sigma, clf,  xlabel, ylabel, legend):

    ax = plot_twoclass_data(X,y,xlabel,ylabel,legend)
    ax.autoscale(False)

    # create a mesh to plot in
    h = 0.05
    x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, h),
                         np.arange(x2_min, x2_max, h))

    ZZ = np.array(np.c_[xx1.ravel(), xx2.ravel()])
    K = np.array([gaussian_kernel(x1,x2,sigma) for x1 in ZZ for x2 in X]).reshape((ZZ.shape[0],X.shape[0]))

    # need to scale it
    scaleK = scaler.transform(K)

    # and add the intercept column of ones
    KK = np.vstack([np.ones((scaleK.shape[0],)),scaleK.T]).T

    # make predictions on this mesh
    Z = clf.predict(KK)

    # Put the result into a contour plot
    Z = Z.reshape(xx1.shape)
    plt.contour(xx1,xx2,Z,cmap=plt.cm.gray,levels=[0.5])
Example #30
    def contour_on_peak(
        self,
        scale=10
        ):

        plt.figure()

        peak_map = self.linked_data.peak_map()  # renamed from `map` to avoid shadowing the builtin

        vmax = np.max(peak_map[np.isfinite(peak_map)])
        vmin = 0.0
        if scale is not None:
            if self.linked_data.noise is not None:
                if self.linked_data.noise.scale is not None:
                    vmax = scale*self.linked_data.noise.scale
                    vmin = 0.0

        plt.imshow(
            peak_map,
            vmin=vmin,
            vmax=vmax,
            origin='lower')
        plt.contour(
            self.twod(),
            linewidths=0.5,
            colors='white'
            )
        plt.show()
Example #31
 def plot_contour(contour_data: np.ndarray, extra_args: Dict) -> None:
     if contour_data.shape != image.shape:
         raise ValueError(
             "The image and the contour data should have matching size, but got {} and {}"
             .format(image.shape, contour_data.shape))
     plt.contour(contour_data, levels=[.5], **extra_args)
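A hypothetical usage of a helper like the one above: outline a mask over an image of matching shape.

# Overlaying a half-level contour on an image (toy image and mask).
import numpy as np
import matplotlib.pyplot as plt

image = np.random.default_rng(2).random((64, 64))
mask = np.zeros_like(image)
mask[20:40, 20:40] = 1.0  # hypothetical segmentation

plt.imshow(image, cmap="gray", origin="lower")
plt.contour(mask, levels=[.5], colors="r")  # same shape as the image, as the check above requires
plt.show()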
Example #32
def display_img(im,
                beamparams=None,
                scale='linear',
                gamma=0.5,
                cbar_lims=False,
                has_cbar=True,
                has_title=True,
                cfun='afmhot',
                axis=False,
                show=False,
                fontsize=FONTSIZE):
    """display the figure on a given axis
       cannot use im.display because  it makes a new figure
    """

    interp = 'gaussian'

    if axis:
        ax = axis
    else:
        fig = plt.figure()
        ax = fig.add_subplot(1, 1, 1)

    imvec = np.array(im.imvec).reshape(-1)
    #flux unit is mJy/uas^2
    imvec = imvec * 1.e3
    fovfactor = im.xdim * im.psize * (1 / RADPERUAS)
    factor = (1. / fovfactor)**2 / (1. / im.xdim)**2
    imvec = imvec * factor

    imarr = (imvec).reshape(im.ydim, im.xdim)
    unit = r'mJy/$\mu$as$^2$'
    if scale == 'log':
        if (imarr < 0.0).any():
            print('clipping values less than 0')
            imarr[imarr < 0.0] = 0.0
        imarr = np.log(imarr + np.max(imarr) / dynamic_range)
        unit = 'log(' + unit + ')'

    if scale == 'gamma':
        if (imarr < 0.0).any():
            print('clipping values less than 0')
            imarr[imarr < 0.0] = 0.0
        imarr = (imarr + np.max(imarr) / dynamic_range)**(gamma)
        unit = '(' + unit + ')^gamma'

    if cbar_lims:
        imarr[imarr > cbar_lims[1]] = cbar_lims[1]
        imarr[imarr < cbar_lims[0]] = cbar_lims[0]

    if cbar_lims:
        ax = ax.imshow(imarr,
                       cmap=plt.get_cmap(cfun),
                       interpolation=interp,
                       vmin=cbar_lims[0],
                       vmax=cbar_lims[1])
    else:
        ax = ax.imshow(imarr, cmap=plt.get_cmap(cfun), interpolation=interp)

    if has_cbar:
        cbar = plt.colorbar(ax, fraction=0.046, pad=0.04, format='%1.2g')
        cbar.set_label(unit, fontsize=fontsize)
        cbar.ax.xaxis.set_label_position('top')
        cbar.ax.tick_params(labelsize=16)
        if cbar_lims:
            plt.clim(cbar_lims[0], cbar_lims[1])

    if beamparams is not None:
        beamparams = [
            beamparams[0], beamparams[1], beamparams[2], -.35 * im.fovx(),
            -.35 * im.fovy()
        ]
        beamimage = im.copy()
        beamimage.imvec *= 0
        beamimage = beamimage.add_gauss(1, beamparams)
        halflevel = 0.5 * np.max(beamimage.imvec)
        beamimarr = (beamimage.imvec).reshape(beamimage.ydim, beamimage.xdim)
        plt.contour(beamimarr, levels=[halflevel], colors='w', linewidths=3)
        ax = plt.gca()

    plt.axis('off')
    fov_uas = im.xdim * im.psize / RADPERUAS  # get the fov in uas
    roughfactor = 1. / 3.  # make the bar about 1/3 the fov
    fov_scale = 40
    #fov_scale = int( math.ceil(fov_uas * roughfactor / 10.0 ) ) * 10 # round around 1/3 the fov to nearest 10
    start = im.xdim * roughfactor / 3.0  # select the start location
    end = start + fov_scale / fov_uas * im.xdim  # determine the end location based on the size of the bar
    plt.plot([start, end], [im.ydim - start, im.ydim - start],
             color="white",
             lw=1)  # plot line
    plt.text(x=(start + end) / 2.0,
             y=im.ydim - start - im.ydim / 20,
             s=str(fov_scale) + r" $\mu$as",
             color="white",
             ha="center",
             va="center",
             fontsize=int(1.2 * fontsize),
             fontweight='bold')

    ax.axes.get_xaxis().set_visible(False)
    ax.axes.get_yaxis().set_visible(False)

    if show:
        plt.show(block=False)

    return ax
Example #33
def plot_ts(total_profiles_data, stations_to_plot, stations_range, lon,
            section_code, deployment_info):

    pres = []
    temp = []
    psal = []
    dens = []
    ptemp = []
    station_idx = []

    fig1 = plt.figure(figsize=(7.87402, 6.29921))
    ax1 = fig1.add_subplot(111)

    font_size = 8

    for n in range(0, stations_range):

        station = stations_to_plot[n]

        pres_station = total_profiles_data[station]['pres']
        temp_station = total_profiles_data[station]['temp1']
        psal_station = total_profiles_data[station]['psal1']

        RANGE = len(psal_station)
        station_idx_station = [lon[n]] * RANGE
        #pres.append(pres_station)
        #            temp.append(temp_station)
        #            psal.append(psal_station)

        dens_station, ptemp_station, si, ti, smin, smax, tmin, tmax = DataOps.calculate_dens_and_ptemp(
            psal_station, temp_station, pres_station)
        #            dens.append(dens_station)
        #            ptemp.append(ptemp_station)
        psal = np.concatenate((psal, psal_station), axis=0)
        ptemp = np.concatenate((ptemp, ptemp_station), axis=0)
        station_idx = np.concatenate((station_idx, station_idx_station),
                                     axis=0)

        if n == 0:
            levels = np.arange(dens_station.min(), dens_station.max(), 0.1)
            CS = plt.contour(si,
                             ti,
                             dens_station,
                             linewidths=0.05,
                             linestyles='--',
                             colors='k',
                             levels=levels)
            plt.clabel(CS, fontsize=4, inline=1, inline_spacing=1,
                       fmt='%1.1f')  # label the density contours

            ax1.grid(visible=True, which='major', color='grey', linewidth=0.01)
            ax1.grid(visible=True, which='minor', color='grey', linewidth=0.001)
            ax1.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))
            ax1.xaxis.set_major_formatter(FormatStrFormatter('%.2f'))

            plt.xticks(np.arange(smin, smax, 0.1))
            plt.xticks(rotation=45)
            plt.yticks(np.arange(tmin + 1, tmax, 0.25))
            plt.tick_params(axis='both', which='major', labelsize=6)
            location_long_name = PlotSettings.set_location_long_name(
                section_code)
            plt.title(deployment_info['cruise_name'] + ' - Theta-S' + '  -  ' +
                      location_long_name,
                      fontsize=font_size)
            plt.xlabel('Salinity (PSU)', fontsize=font_size)
            plt.ylabel('Potential Temperature (deg C)', fontsize=font_size)

    plt.scatter(psal,
                ptemp,
                c=station_idx,
                s=20,
                edgecolor='black',
                linewidth=0.05)

    cbar = plt.colorbar()
    cbar.set_label('Longitude (deg E)', fontsize=font_size)
    cbar.ax.tick_params(labelsize=6)
    # set axes range
    plt.xlim(37.7, 38.7)
    plt.ylim(12.8, 15.2)
    # save figure
    figure_name = section_code + '_theta-s_diag.png'
    DataOps.save_figure(fig1, globfile.paths['output_figs_path'],
                        deployment_info['deployment_name'], figure_name)
Example #34
import numpy as np
import matplotlib.pyplot as plt
from sympy import *

p = lambda x, y: 2 * x**2 + 2 * y**2 + 4 * x + 4 * y + 3
q = lambda x, y: x**2 + y**2 + 2 * x * y + 3 * x + 5 * y + 4

x = np.arange(-5, 5, 0.001)
y = np.arange(-5, 5, 0.001)

X, Y = np.meshgrid(x, y)

plt.contour(X, Y, p(X, Y), [0], colors='red')
plt.contour(X, Y, q(X, Y), [0], colors='blue')
plt.plot(-1.08405, 0, marker='x', color='black')
plt.plot(-0.29791, 0, marker='x', color='black')
plt.show()

# Using Sylvester's method, build the resultant matrix Q of p and q (eliminating y)
x = Symbol('x', real=True)
Q = Matrix([[2, 4, 2 * x**2 + 4 * x + 3, 0], [0, 2, 4, 2 * x**2 + 4 * x + 3],
            [1, 5 + 2 * x, x**2 + 3 * x + 4, 0],
            [0, 1, 5 + 2 * x, x**2 + 3 * x + 4]])
equation = Q.det()
print(equation)
roots_x = solve(equation, x)
print("Roots of {} are \n{} and {}".format(equation, roots_x[0].evalf(),
                                           roots_x[1].evalf()))

# Sanity check to see whether roots from ratio method are correct
# Ratio method roots were computed manually
Example #35
    def run_task(self):  # {{{
        """
        Plots time-series output of properties in an ocean region.
        """
        # Authors
        # -------
        # Xylar Asay-Davis

        self.logger.info("\nPlotting TS diagram for {}"
                         "...".format(self.regionName))

        register_custom_colormaps()

        config = self.config
        sectionName = self.sectionName

        startYear = self.mpasClimatologyTask.startYear
        endYear = self.mpasClimatologyTask.endYear

        regionMaskFile = self.mpasMasksSubtask.geojsonFileName

        fcAll = read_feature_collection(regionMaskFile)

        fc = FeatureCollection()
        for feature in fcAll.features:
            if feature['properties']['name'] == self.regionName:
                fc.add_feature(feature)
                break

        self.logger.info('  Make plots...')

        groupLink = 'tsDiag' + self.regionGroup[0].lower() + \
            self.regionGroup[1:].replace(' ', '')

        nSubplots = 1 + len(self.obsDicts)
        if self.controlConfig is not None:
            nSubplots += 1

        if nSubplots == 4:
            nCols = 2
            nRows = 2
        else:
            nCols = min(nSubplots, 3)
            nRows = (nSubplots - 1) // 3 + 1

        axisIndices = numpy.reshape(numpy.arange(nRows * nCols),
                                    (nRows, nCols))[::-1, :].ravel()

        titleFontSize = config.get('plot', 'titleFontSize')
        axis_font = {'size': config.get('plot', 'axisFontSize')}
        title_font = {
            'size': titleFontSize,
            'color': config.get('plot', 'titleFontColor'),
            'weight': config.get('plot', 'titleFontWeight')
        }

        width = 3 + 4.5 * nCols
        height = 2 + 4 * nRows

        # noinspection PyTypeChecker
        fig, axarray = plt.subplots(nrows=nRows,
                                    ncols=nCols,
                                    sharey=True,
                                    figsize=(width, height))

        if nSubplots == 1:
            axarray = numpy.array(axarray)

        if nRows == 1:
            axarray = axarray.reshape((nRows, nCols))

        T, S, zMid, volume, zmin, zmax = self._get_mpas_t_s(self.config)
        mainRunName = config.get('runs', 'mainRunName')
        plotFields = [{
            'S': S,
            'T': T,
            'z': zMid,
            'vol': volume,
            'title': mainRunName
        }]

        if self.controlConfig is not None:
            T, S, zMid, volume, _, _ = self._get_mpas_t_s(self.controlConfig)
            controlRunName = self.controlConfig.get('runs', 'mainRunName')
            plotFields.append({
                'S': S,
                'T': T,
                'z': zMid,
                'vol': volume,
                'title': 'Control: {}'.format(controlRunName)
            })

        for obsName in self.obsDicts:
            obsT, obsS, obsZ, obsVol = self._get_obs_t_s(
                self.obsDicts[obsName])
            plotFields.append({
                'S': obsS,
                'T': obsT,
                'z': obsZ,
                'vol': obsVol,
                'title': obsName
            })

        Tbins = config.getExpression(sectionName, 'Tbins', usenumpyfunc=True)
        Sbins = config.getExpression(sectionName, 'Sbins', usenumpyfunc=True)

        normType = config.get(sectionName, 'normType')

        PT, SP = numpy.meshgrid(Tbins, Sbins)
        SA = gsw.SA_from_SP(SP, p=0., lon=0., lat=-75.)
        CT = gsw.CT_from_t(SA, PT, p=0.)

        neutralDensity = sigma0(SA, CT)
        rhoInterval = config.getfloat(sectionName, 'rhoInterval')
        contours = numpy.arange(24., 29. + rhoInterval, rhoInterval)

        diagramType = config.get(sectionName, 'diagramType')
        if diagramType not in ['volumetric', 'scatter']:
            raise ValueError('Unexpected diagramType {}'.format(diagramType))

        lastPanel = None
        if config.has_option(sectionName, 'volMin'):
            volMinMpas = config.getfloat(sectionName, 'volMin')
        else:
            volMinMpas = None
        if config.has_option(sectionName, 'volMax'):
            volMaxMpas = config.getfloat(sectionName, 'volMax')
        else:
            volMaxMpas = None
        for index in range(len(axisIndices)):
            panelIndex = axisIndices[index]

            row = nRows - 1 - index // nCols
            col = numpy.mod(index, nCols)

            if panelIndex >= nSubplots:
                plt.delaxes(axarray[row, col])
                continue

            plt.sca(axarray[row, col])
            T = plotFields[index]['T']
            S = plotFields[index]['S']
            z = plotFields[index]['z']
            volume = plotFields[index]['vol']
            title = plotFields[index]['title']
            title = limit_title(title, max_title_length=60)

            CS = plt.contour(SP,
                             PT,
                             neutralDensity,
                             contours,
                             linewidths=1.,
                             colors='k',
                             zorder=2)
            plt.clabel(CS, fontsize=12, inline=1, fmt='%4.2f')

            if diagramType == 'volumetric':
                lastPanel, volMin, volMax = \
                    self._plot_volumetric_panel(T, S, volume)

                if volMinMpas is None:
                    volMinMpas = volMin
                if volMaxMpas is None:
                    volMaxMpas = volMax
                if normType == 'linear':
                    norm = colors.Normalize(vmin=0., vmax=volMaxMpas)
                elif normType == 'log':
                    if volMinMpas is None or volMaxMpas is None:
                        norm = None
                    else:
                        norm = colors.LogNorm(vmin=volMinMpas, vmax=volMaxMpas)
                else:
                    raise ValueError(
                        'Unsupported normType {}'.format(normType))
                if norm is not None:
                    lastPanel.set_norm(norm)
            else:
                lastPanel = self._plot_scatter_panel(T, S, z, zmin, zmax)

            CTFreezing = freezing.CT_freezing(Sbins, 0, 1)
            PTFreezing = gsw.t_from_CT(gsw.SA_from_SP(Sbins,
                                                      p=0.,
                                                      lon=0.,
                                                      lat=-75.),
                                       CTFreezing,
                                       p=0.)
            plt.plot(Sbins,
                     PTFreezing,
                     linestyle='--',
                     linewidth=1.,
                     color='k')

            plt.ylim([Tbins[0], Tbins[-1]])
            plt.xlim([Sbins[0], Sbins[-1]])

            plt.xlabel('Salinity (PSU)', **axis_font)
            if col == 0:
                plt.ylabel(r'Potential temperature ($^\circ$C)', **axis_font)
            plt.title(title)

        # do this before the inset because otherwise it moves the inset
        # and cartopy doesn't play too well with tight_layout anyway
        plt.tight_layout()

        fig.subplots_adjust(right=0.91)
        if nRows == 1:
            fig.subplots_adjust(top=0.85)
        else:
            fig.subplots_adjust(top=0.88)

        suptitle = 'T-S diagram for {} ({}, {:04d}-{:04d})\n' \
                   ' {} m < z < {} m'.format(self.regionName, self.season,
                                             startYear, endYear, zmin, zmax)
        fig.text(0.5,
                 0.9,
                 suptitle,
                 horizontalalignment='center',
                 **title_font)

        inset = add_inset(fig, fc, width=1.5, height=1.5)

        # add an empty plot covering the subplots to give common axis labels
        pos0 = axarray[0, 0].get_position()
        pos1 = axarray[-1, -1].get_position()
        pos_common = [pos0.x0, pos1.y0, pos1.x1 - pos0.x0, pos0.y1 - pos1.y0]
        print(pos_common)
        common_ax = fig.add_axes(pos_common, zorder=-2)
        common_ax.spines['top'].set_color('none')
        common_ax.spines['bottom'].set_color('none')
        common_ax.spines['left'].set_color('none')
        common_ax.spines['right'].set_color('none')
        common_ax.tick_params(labelcolor='w',
                              top=False,
                              bottom=False,
                              left=False,
                              right=False)

        common_ax.set_xlabel('Salinity (PSU)', **axis_font)
        common_ax.set_ylabel(r'Potential temperature ($^\circ$C)', **axis_font)

        # turn off labels for individual plots (just used for spacing)
        for index in range(len(axisIndices)):
            row = nRows - 1 - index // nCols
            col = numpy.mod(index, nCols)
            ax = axarray[row, col]
            ax.set_xlabel('')
            ax.set_ylabel('')

        # move the color bar down a little to avoid the inset
        pos0 = inset.get_position()
        pos1 = axarray[-1, -1].get_position()
        pad = 0.04
        top = pos0.y0 - pad
        height = top - pos1.y0
        cbar_ax = fig.add_axes([0.92, pos1.y0, 0.02, height])
        cbar = fig.colorbar(lastPanel, cax=cbar_ax)

        if diagramType == 'volumetric':
            cbar.ax.get_yaxis().labelpad = 15
            cbar.ax.set_ylabel(r'volume (m$^3$)', rotation=270)
        else:
            cbar.ax.set_ylabel('depth (m)', rotation=270)

        outFileName = '{}/TS_diagram_{}_{}.png'.format(self.plotsDirectory,
                                                       self.prefix,
                                                       self.season)
        savefig(outFileName, tight=False)

        caption = 'Regional mean of {}'.format(suptitle)
        write_image_xml(config=config,
                        filePrefix='TS_diagram_{}_{}'.format(
                            self.prefix, self.season),
                        componentName='Ocean',
                        componentSubdirectory='ocean',
                        galleryGroup='T-S Diagrams',
                        groupLink=groupLink,
                        gallery=self.regionGroup,
                        thumbnailDescription=self.regionName,
                        imageDescription=caption,
                        imageCaption=caption)
Example #36
    plt.figure(figsize=figsize)

    plt.imshow(data, interpolation='nearest', origin='lower', aspect='auto')

    plt.clim(cmin, cmax)
    cbar = plt.colorbar()
    cbar.ax.tick_params(labelsize=fontsize)

    plt.title(filename, fontsize=fontsize)

    if 'prog_eta' in filename:
        plt.contour(data,
                    colors="black",
                    origin='lower',
                    extent=extent,
                    vmin=cmin,
                    vmax=cmax,
                    levels=eta_contour_levels,
                    linewidths=0.5)
    elif 'prog_h' in filename:
        plt.contour(data,
                    colors="black",
                    origin='lower',
                    extent=extent,
                    vmin=cmin,
                    vmax=cmax,
                    levels=h_contour_levels,
                    linewidths=0.5)
#	elif '_u' in filename:
#		hs = 0.001
#		h_contour_levels = np.append(np.arange(-0.1, 0-hs, hs), np.arange(hs, 0.1, hs))
Example #37
def run_optimizer(wvl,
                  PropDist,
                  Cn2,
                  beam_waist,
                  f_curv=-100e3,
                  beam_type='spherical',
                  c=8,
                  log2N_range=[11, 10, 9],
                  max_screens=15):
    """
    Run an optimization to determine the necessary sampling constraints for the input simulation scenario. The code
    will compare the optimal parameters at the knee of the log2N contour plot for different values of N, as denoted by
    the input list log2N_range. It will return the first N value in the list of log2N_range for which the number of
    screens in the simulation does not exceed max_screens

    :param wvl: the wavelength in meters
    :param PropDist: the propagation distance in meters
    :param Cn2: the cn2 of atmosphere in meters^-2/3s
    :param beam_waist: the beam waist in meters
    :param f_curv: the radius of curvature of the beam. A negative value indicates divergence
    :param beam_type: the type of beam: spherical or planar
    :param c: an energy conservation term. 2 means conserve 97% energy and 4 means conserve 99% energy
    :param log2N_range: the range of values to consider for 2**N. By putting them in descending order, you prioritize
                        a higher sampling rate over quicker simulation
    :param max_screens: the maximum number of screens that you want to use in simulations. Set this lower for quicker
                        but more aliased simulation
    :return: a dictionary containing the optimal parameters and the input conditions
    """

    # Define constants here
    k = 2 * pi / wvl  # optical wavenumber [rad/m]
    R = f_curv  # wavefront radius of curvature [m]

    # Spherical Wave and Plane Wave coherence diameters [m]
    if beam_type == 'spherical':
        r0 = (0.423 * k**2 * Cn2 * 3 / 8 * PropDist)**(-3 / 5)
    else:
        r0 = (0.423 * k**2 * Cn2 * PropDist)**(-3 / 5)

    # Calculate the transverse coherence length for the purpose of calculating the expected beam width
    rho_0 = r0 / 2.1

    # User toggle for plane or spherical wavefront
    coherence_diam = r0

    # Calculate the diameter of the receiver plane of interest using the equations for the long term beam wandering
    D_beam = np.sqrt(
        2
    ) * beam_waist  # the aperture diameter, per page 196 of the EOIR textbook
    D1 = D_beam
    BW_diffraction = (4 * PropDist**2.0) / ((k * D_beam)**2.0)
    BW_focusing = ((D_beam / 2)**2.0) * ((1 - PropDist / R)**2.0)
    BW_turbulence_spread = (4 * PropDist**2.0) / ((k * rho_0)**2.0)

    print("Input beam diameter: ", D_beam)
    print("Expected beam width due to diffraction and focusing: ",
          np.sqrt(2 * (BW_diffraction + BW_focusing)))
    print("Expected beam width due to diffraction, turbulence and focusing: ",
          np.sqrt(2 * (BW_diffraction + BW_focusing + BW_turbulence_spread)))
    print("Spread due to turbulence: ")
    print(BW_turbulence_spread)
    print()

    # Diameter for the observation aperture for long term averaging, from equation (2.154) of EOIR book
    # DRx = np.sqrt(2 * (BW_diffraction + BW_focusing + BW_turbulence_spread))

    # Changing DRx to the vacuum width
    # DRx =  np.sqrt(2 * (BW_diffraction + BW_focusing)) # by definition, the beam radius actually
    # DRx = 2* np.sqrt((BW_diffraction + BW_focusing)) # by definition, the beam diameter
    DRx = 2 * np.sqrt((BW_diffraction + BW_focusing + BW_turbulence_spread))
    # DRx = np.sqrt(2.4 * (BW_diffraction + BW_focusing))  # a trade off for computational purposes

    # log-amplitude variance
    p = np.linspace(0, int(PropDist), 1000)
    rytov = 0.563 * k**(7 / 6) * sum(Cn2 *
                                     (1 - p / PropDist)**(5 / 6) * p**(5 / 6) *
                                     (p[1] - p[0]))
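
    # The sum above is a Riemann-sum approximation of the Rytov log-amplitude
    # variance integral for a spherical wave,
    #   sigma_R^2 = 0.563 * k**(7/6) * integral_0^L Cn2(z) * (1 - z/L)**(5/6) * z**(5/6) dz,
    # sampled here at 1000 evenly spaced points with a constant Cn2.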

    # screen properties
    # Define the r0's of each phase screen using a standard minimization algorithm.
    # There are potential divide by zero warnings that are then addressed by the code.
    NumScr = 11  # number of screens
    A = np.zeros((2, NumScr))  # matrix
    alpha = np.arange(0, NumScr) / (NumScr - 1)
    A[0, :] = alpha**(5 / 3)
    A[1, :] = (1 - alpha)**(5 / 6) * alpha**(5 / 6)
    b = np.array(
        [coherence_diam**(-5 / 3), rytov / 1.33 * (k / PropDist)**(5 / 6)])
    # initial guess
    x0 = (NumScr / 3 * coherence_diam * np.ones((NumScr, 1)))**(-5 / 3)
    # objective function, @ is matrix multiply
    fun = lambda X: np.sum((A @ X.flatten() - b)**2)
    # constraints
    x1 = np.zeros(NumScr)  # lower bounds; 1-D to match the shape of x2 for Bounds
    rmax = 0.1  # maximum Rytov number per partial prop according to Martin/Flatte
    x2 = rmax / 1.33 * (k / PropDist)**(5 / 6) / A[1, :]
    x2[A[1, :] == 0] = 50**(-5 / 3)  # address divide by zero

    res = minimize(fun, x0, bounds=Bounds(x1, x2))
    soln = res.x
    # check screen r0s
    r0scrn = soln**(-3 / 5)
    r0scrn[np.isinf(r0scrn)] = 1e6  # address divide by zero
    # check resulting coherence_diam & rytov with minimization solution
    # too few phase screens will cause these numbers to disagree
    bp = A @ soln.flatten()
    compare1 = [bp[0]**(-3 / 5), bp[1] * 1.33 * (PropDist / k)**(5 / 6)]
    compare2 = [coherence_diam, rytov]
    # print(compare1, compare2)

    # Account for conservation of energy term in the source plane, the receiver plane has already been accounted for
    D1p = D1 + c * wvl * PropDist / coherence_diam

    # Changing the accounting for turbulence spread here
    # DRxp = DRx
    DRxp = DRx + c * wvl * PropDist / coherence_diam

    print(
        "Expected beam width due to the conversation of energy after turbulent prop: "
    )
    print(DRxp)
    print()

    # Now perform the minimization on constraints 1-4
    delta1 = np.linspace(1.1 * wvl * PropDist / DRxp / 1000,
                         1.1 * wvl * PropDist / DRxp, 1000)
    deltan = np.linspace(1.1 * wvl * PropDist / D1p / 1000,
                         1.1 * wvl * PropDist / D1p, 1000)

    # constraint 1
    deltan_max = -DRxp / D1p * delta1 + wvl * PropDist / D1p
    # constraint 3
    Rdxmin3 = (1 + PropDist / R) * delta1 - wvl * PropDist / D1p
    Rdxmax3 = (1 + PropDist / R) * delta1 + wvl * PropDist / D1p

    # Derive the knee curve for each log2N value
    delta1_knee_list = []
    deltan_knee_list = []
    amp_value = 1.0
    for log2N in log2N_range:
        N_curr = 2**log2N
        deltan_constraint = (amp_value * wvl * PropDist /
                             (2 * delta1) + amp_value * DRxp / 2) / (
                                 N_curr - amp_value * D1p / (2 * delta1))

        # If deltan_constraint has values less than zero, then ignore those values
        valid_values = deltan_constraint > 0
        delta1_temp = delta1[valid_values]
        deltan_constraint = deltan_constraint[valid_values]

        # Find the default knee value index
        min_deltan = np.nanmin(deltan_constraint)
        max_deltan = np.nanmax(deltan_constraint)
        min_delta1 = np.min(delta1_temp)
        max_delta1 = np.max(delta1_temp)
        default_knee_logval_idx = np.nanargmin(
            (deltan_constraint / max_deltan - min_deltan / max_deltan)**2.0 +
            (delta1_temp / max_delta1 - min_delta1 / max_delta1)**2.0)

        # Iterate through the other possible knee values and find whether any knee maximizes the layer thickness
        knee_logval_idx = default_knee_logval_idx
        max_sampling_dxdn = np.min([
            delta1_temp[default_knee_logval_idx],
            deltan_constraint[default_knee_logval_idx]
        ])
        for idx in range(0, np.size(deltan_constraint)):
            if np.min([delta1_temp[idx], deltan_constraint[idx]
                       ]) > max_sampling_dxdn:
                max_sampling_dxdn = np.min(
                    [delta1_temp[idx], deltan_constraint[idx]])
                knee_logval_idx = idx

        delta1_knee_list.append(delta1_temp[knee_logval_idx])
        deltan_knee_list.append(deltan_constraint[knee_logval_idx])

    # Debug: print the knee lists for inspection
    # print("Knee lists for delta1 and deltan: ")
    # print(delta1_knee_list)
    # print(deltan_knee_list)

    # Now iterate through the knee values and calculate the constraints on 1, 3 and the screens and make sure that they are valid
    c1_list = []
    c3_list = []
    c_screen_list = []
    num_screen_list = []
    max_layer_thickness_list = []
    for idx in range(len(log2N_range)):
        d1_idx = delta1_knee_list[idx]
        dn_idx = deltan_knee_list[idx]

        # Constraint 1:
        c1_deltan = (-DRxp / D1p * d1_idx + wvl * PropDist / D1p) >= dn_idx
        c1_list.append(c1_deltan)

        # Constraint 3:
        c3_deltan =  ((1 + PropDist / R) * d1_idx - wvl * PropDist / D1p) <= dn_idx and \
                     ((1 + PropDist / R) * d1_idx + wvl * PropDist / D1p) >= dn_idx
        c3_list.append(c3_deltan)

        # Final constraint: Is the minimum number of screens less than the desired max number
        N = 2**log2N_range[idx]
        zmax = (min(d1_idx, dn_idx)**2) * N / wvl  # maximum layer thickness between screens
        max_layer_thickness_list.append(zmax)

        numScrMin = np.ceil(
            PropDist / zmax) + 2  # minimum count, plus a margin beyond the strict minimum
        num_screen_list.append(numScrMin)

        c_screen = numScrMin <= max_screens
        c_screen_list.append(c_screen)

    # Debug:
    print("Min number of screens for log2N list", str(log2N_range), " : ")
    print(num_screen_list)
    print()

    # Using the descending order of our log2N_range list, return the maximum value that satisfies all constraints
    # while also satisfying the minimum number of screens
    constraint_list = np.logical_and(np.logical_and(c1_list, c3_list),
                                     c_screen_list)
    where_constraints_satisfied = np.where(constraint_list == True)[0]
    optimal_constraint_dict = {}
    optimal_constraint_dict["cn2"] = Cn2
    optimal_constraint_dict["propdist"] = PropDist
    optimal_constraint_dict["beam_waist"] = beam_waist
    optimal_constraint_dict["wavelength"] = wvl
    optimal_constraint_dict["f_curv"] = R

    if np.size(where_constraints_satisfied) == 0:
        print(
            "There is no value for which the maximum number of screens and constraints 1-4 are satisfied for this"
            "scenario. Please look at the plot and revise your simulation setup. "
        )
        optimal_constraint_dict["success"] = False
    else:
        optimal_constraint_dict["success"] = True
        first_constraint_idx = where_constraints_satisfied[0]
        print(
            "A satisfactory simulation scenario was found. Values of first occurence are: "
        )
        print("N = ", 2**log2N_range[first_constraint_idx])
        optimal_constraint_dict["N"] = 2**log2N_range[first_constraint_idx]
        print("dx = ", delta1_knee_list[first_constraint_idx])
        optimal_constraint_dict["dx"] = delta1_knee_list[first_constraint_idx]
        print("Rdx = ", deltan_knee_list[first_constraint_idx])
        optimal_constraint_dict["Rdx"] = deltan_knee_list[first_constraint_idx]
        print("Min # Screens = ", num_screen_list[first_constraint_idx])
        optimal_constraint_dict["screens"] = int(
            num_screen_list[first_constraint_idx])

        # print("Side Len = ", np.max([delta1_knee_list[first_constraint_idx] * 2**log2N_range[first_constraint_idx],
        #                              deltan_knee_list[first_constraint_idx] * 2**log2N_range[first_constraint_idx]]))
        # optimal_constraint_dict["side_len"] = np.max([delta1_knee_list[first_constraint_idx] * 2**log2N_range[first_constraint_idx],
        #                                         deltan_knee_list[first_constraint_idx] * 2**log2N_range[first_constraint_idx]])

        # Set sidelen to be the initial distance
        print(
            "Side Len = ", delta1_knee_list[first_constraint_idx] *
            2**log2N_range[first_constraint_idx])
        optimal_constraint_dict["side_len"] = delta1_knee_list[
            first_constraint_idx] * 2**log2N_range[first_constraint_idx]

        print()

    # Now plot the contours along with the valid constraints for our case
    plt.figure(1)
    plt.plot(delta1, deltan_max, 'b+')
    plt.plot(delta1, Rdxmin3, 'r-')
    plt.plot(delta1, Rdxmax3, 'r+')
    X, Y = np.meshgrid(delta1, deltan)
    log2N_range.sort()
    N2_contour = (wvl * PropDist + D1p * Y + DRxp * X) / (2 * X * Y)
    contours = plt.contour(delta1, deltan, np.log2(N2_contour), log2N_range)
    plt.clabel(contours, inline=True)

    # Plot the knee points with a marker to denote if it satisfies our constraints
    for idx in range(len(log2N_range)):

        # If it satisfies all three constraints, give it a star marker. Otherwise, give it a x marker
        if c1_list[idx] and c3_list[idx] and c_screen_list[idx]:
            marker_constraint = "*"
        else:
            marker_constraint = "x"

        plt.scatter(delta1_knee_list[idx],
                    deltan_knee_list[idx],
                    marker=marker_constraint,
                    s=40)

    # plt.colorbar()
    plt.axis([0, delta1[-1], 0, deltan[-1]])
    plt.title('Constraints 1, 2, 3 for point source problem')
    plt.xlabel('dx [m]')
    plt.ylabel('dn [m]')
    # plt.savefig('screen-params.png')

    plt.show()

    return optimal_constraint_dict
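

# Hedged usage sketch for run_optimizer; the scenario values below are
# illustrative assumptions, not values from the original source.
if __name__ == "__main__":
    opt = run_optimizer(wvl=1.55e-6,      # 1550 nm laser
                        PropDist=10e3,    # 10 km path
                        Cn2=1e-15,        # moderate turbulence strength
                        beam_waist=0.05,  # 5 cm waist
                        max_screens=15)
    if opt["success"]:
        print("N =", opt["N"], ", screens =", opt["screens"])
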
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D

delta = 0.5
pi = np.pi
x = np.arange(-pi, pi, delta)
y = np.arange(-pi, pi, delta)
X, Y = np.meshgrid(x, y)
z = np.sin(X) * np.cos(Y)  # placeholder surface: np.cross(x, y) is invalid for 1-D grids of this length
fig = plt.figure()
ax = fig.add_subplot(projection='3d')  # fig.gca(projection='3d') was removed in Matplotlib 3.6
ax.plot_surface(X, Y, z, cmap=cm.jet, linewidth=0.2)
plt.show()

plt.figure()
CS = plt.contour(X, Y, z, 10, colors='k')  # draw the contour map, sliced into 10 levels
plt.clabel(CS, inline=1, fontsize=9)  # inline draws the labels on the contours, breaking the line under each label
plt.title('Simplest default with labels')
plt.show()


# Problem 5:
import math
import numpy as np
from scipy.stats import f



class PCA:
    def __init__(self, X):
Esempio n. 39
0
    def plot_lcurve_contourmap(self, draw_threshold=False,
                               xlim=None, ylim=None,
                               contour_step=None
                               ):
        """
        plot 'lcurve' contour map
        N  = number of levels to contour
        imethod = type of interpolation for numpy.griddata
                  options are 'nearest','linear', and 'cubic'
        symbol = symbol for plotting
        fontsize = numeric value
        draw_threshold = draw a black contour at a given percentage of the minimum rms error
                         e.g. 110 will draw a contour at 1.1 * minimum error value achieved

        """

        self.Fit.read_fit()
        a = self.Fit.penalty_anisotropy
        s = self.Fit.penalty_structure
        mis = self.Fit.misfit_mean
        aa = [str(round(i, 1)) for i in self.Fit.weight_anisotropy]
        ss = [str(round(i, 1)) for i in self.Fit.weight_structure]

        # grid the data to make contour plot
        # first, define points to grid
        points = np.vstack([s, a]).T
        # define points xi to grid onto
        xi = np.array(
            np.meshgrid(
                np.linspace(
                    0., max(s)), np.linspace(
                    0., max(a)))).T
#        print xi
        f1 = si.griddata(points, mis, xi, method=self.imethod)
        cmap = plt.get_cmap(self.cmap)

        if contour_step is not None:
            if contour_step < 1.:
                rounding = int(np.ceil(np.abs(np.log10(contour_step))))
            else:
                rounding = 0
            levels = np.arange(round(np.amin(self.Fit.misfit_mean), rounding),
                               round(np.amax(self.Fit.misfit_mean), rounding),
                               contour_step)
            plt.contour(xi[:, :, 0],
                        xi[:, :, 1],
                        f1, cmap=cmap,
                        levels=levels)
        else:
            plt.contour(xi[:, :, 0], xi[:, :, 1], f1, cmap=cmap)
        plt.colorbar(use_gridspec=True)

        if draw_threshold:
            plt.contour(xi[:, :, 0], xi[:, :, 1], f1,
                        levels=[
                round(
                    min(mis) *
                    self.Fit.misfit_threshold,
                    2)],
                linewidths=[1.5],
                colors='k')
#        plt.gca().set_aspect('equal')
        plt.scatter(s, a, c='k', marker=self.symbol, lw=0)

        if xlim is None:
            xlim = plt.xlim()
        if ylim is None:
            ylim = plt.ylim()
        plt.xlim(xlim)
        plt.ylim(ylim)

        for i in range(len(aa)):
            if (s[i] < xlim[-1]) and (a[i] < ylim[-1]):
                plt.text(s[i], a[i], ','.join(
                    [ss[i], aa[i]]), fontsize=self.fontsize)
        plt.xlabel('structure penalty')
        plt.ylabel('anisotropy penalty')
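
# Hedged usage sketch: plot_lcurve_contourmap is a method of a class (not
# shown here) that supplies the Fit, imethod, cmap, symbol and fontsize
# attributes used above; `model` below is a hypothetical instance.
# model.plot_lcurve_contourmap(draw_threshold=True, contour_step=0.05)
# plt.show()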
Esempio n. 40
0
plt.clf()
plt.scatter(X1[0,:],X1[1,:])
plt.scatter(X2[0,:],X2[1,:])

# Train the SVM on the data
W, b = SVM_train(X, Y, .1, num_iter=10000, C=.001)

# Map the decision boundary and probability density
resolution = 30
x_range = np.linspace(-4,4,resolution)
y_range = np.linspace(-4,4,resolution)
Z = np.zeros((len(x_range), len(y_range)))
for idx, x in enumerate(x_range):
        X_test = np.array([x*np.ones(resolution), y_range])
        Z[idx,:] = SVM_predict(X_test, X, W, b)
plt.contour(x_range, y_range, Z.T, levels=[0])  # transpose: rows of Z are indexed by x above
plt.xlim(-3,3)
plt.ylim(-3,3)
plt.draw()











Esempio n. 41
0
]

for xi in range(X):
    for yi in range(Y):
        x = x_min + (x_max - x_min) / X * xi
        y = y_min + (y_max - y_min) / Y * yi
        z, iter = newton(f,
                         x + 1.j * y,
                         dx=dx,
                         dy=dy,
                         fprime=f1,
                         maxiter=maxiter)
        if iter < maxiter:
            z = round(z.real, ndigits) + 1j * round(z.imag, ndigits)
        else:
            z = None
        try:
            i = zeros.index(z) + 1  # 1-based to match the append branch below; 0 is reserved for no convergence
        except ValueError:
            zeros.append(z)
            i = len(zeros)
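        # Encode each pixel: 0 if Newton did not converge, +i if the starting
        # point already lies within tolerance of root i, and -i if it
        # converged to root i from farther away.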
        pic[yi, xi] = 0 if z is None else -i if abs(x + 1.j * y -
                                                    z) > .1**ndigits else i
        iters[yi, xi] = iter

plt.imshow(pic, interpolation='None', cmap='Set1')
plt.contour(np.log(iters))
plt.figure()
plt.imshow(np.log(iters), interpolation='None', cmap='gray')
plt.show()
Esempio n. 42
0
def contours(
    fn,
    package_dir,
    t2convert,
    levels=[0.1, 0.4, 0.8],
    names=['Light', 'Medium', 'Heavy'],
):

    print "contouring data in:", fn
    nc = Dataset(fn)
    particles = nc_particles.Reader(nc)
    times = particles.times
    dt = [
        np.abs(((output_t - t2convert).total_seconds()) / 3600)
        for output_t in times
    ]
    t = dt.index(min(dt))
    print('Converting output from: ', times[t])

    TheData = particles.get_timestep(t,
                                     variables=[
                                         'latitude', 'longitude', 'id',
                                         'depth', 'mass', 'age', 'status_codes'
                                     ])

    # contouring
    status = TheData['status_codes']
    floating = np.where(status == 2)[0]
    x = TheData['longitude'][floating]
    y = TheData['latitude'][floating]

    # Perform the kernel density estimate
    xx, yy = np.mgrid[min(x) - .1:max(x) + .1:100j,
                      min(y) - .1:max(y) + .1:100j]
    positions = np.vstack([xx.ravel(), yy.ravel()])
    values = np.vstack([x, y])
    kernel = st.gaussian_kde(values)
    f = np.reshape(kernel(positions).T, xx.shape)
    max_density = f.max()

    levels.sort()
    particle_contours = [lev * max_density for lev in levels]

    cs = contour(xx, yy, f, particle_contours)

    w = shp.Writer(shp.POLYGON)
    w.autobalance = 1

    w.field('Year', 'C')
    w.field('Month', 'C')
    w.field('Day', 'C')
    w.field('Hour', 'C')
    w.field('Depth', 'N')
    w.field('Type', 'C')

    for c in range(len(cs.collections)):
        p = cs.collections[c].get_paths()[0]
        v = p.vertices
        coords = [[[i[0], i[1]] for i in v]]
        w.poly(shapeType=3, parts=coords)
        w.record(times[t].year, times[t].month, times[t].day, times[t].hour,
                 TheData['depth'][c], names[c])
        print(names[c])

    source_fdir = os.path.join(package_dir, 'source_files')
    shapefile_name = os.path.split(fn)[-1].split('.')[0]
    w.save(os.path.join(source_fdir, shapefile_name))

    nc.close()

    # create the PRJ file
    prj_filename = os.path.join(source_fdir, shapefile_name)
    write_proj_file(prj_filename)

    files = os.listdir(source_fdir)
    zipfname = shapefile_name + '.zip'
    zipf = zipfile.ZipFile(os.path.join(source_fdir, zipfname), 'w')
    for f in files:
        if f.split('.')[0] == shapefile_name:
            zipf.write(os.path.join(source_fdir, f), arcname=f)
    zipf.close()

    return zipfname
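
# Hedged usage sketch for contours(); the file name, package directory and
# timestamp are illustrative assumptions, not values from the original source.
# from datetime import datetime
# zipname = contours('spill_output.nc', '/path/to/package',
#                    datetime(2015, 6, 1, 12), levels=[0.1, 0.4, 0.8])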
Esempio n. 43
0
def main():
    # "defining" # of iterations
    iterations_n = 1000

    #Loading data from the training and test sets
    train1 = genfromtxt(filename1, delimiter=",")
    test1 = genfromtxt(filename2, delimiter=",")
    train2 = genfromtxt(filename3, delimiter=",")
    test2 = genfromtxt(filename4, delimiter=",")

    #Separating the vectors x = (x1,x2) for each dataset (training and test)
    x = train1[:, 0:-1]
    x2 = test1[:, 0:-1]
    x3 = train2[:, 0:-1]
    x4 = test2[:, 0:-1]

    #Separating the vectors y (expected classification) for each dataset (training and test)
    y = train1[:, -1]
    y2 = test1[:, -1]
    y3 = train2[:, -1]
    y4 = test2[:, -1]

    #Defining the appends to be added to the x vectors
    ap1 = np.ones(len(y))
    ap2 = np.ones(len(y2))
    ap3 = np.ones(len(y3))
    ap4 = np.ones(len(y4))

    #Appending the '1' values to the x vectors
    x = np.column_stack((ap1, x)).T
    x2 = np.column_stack((ap2, x2)).T
    x3 = np.column_stack((ap3, x3)).T
    x4 = np.column_stack((ap4, x4)).T

    #Setting initial weight vectors (in this case, all 0)
    w = np.zeros((1, x.shape[0])).T
    w3 = np.zeros(
        (1, x3.shape[0] + 2)).T  #inserting 2 extra features for nonlinear case

    #Nonlinear transformation for the nonlinear case
    new_x3 = x_transform(x3)
    new_x4 = x_transform(x4)

    #New weights for the linear case (max iteration = 1000, learning-rate = 0.1)
    w2, erro1 = logistic_r(x, y, x2, y2, w, iterations_n, 0.1)
    #New weights for the nonlinear case (max iteration = 1000, learning-rate = 0.1)
    w4, erro2 = logistic_r(new_x3, y3, new_x4, y4, w3, iterations_n, 0.1)

    #	h  = zf(w2,x)
    #	h2 = zf(w2,x2)
    #	h3 = zf(w4,new_x3)
    #	h4 = zf(w4,new_x4)

    #	c  = classifier(h)
    #	c2 = classifier(h2)
    #	c3 = classifier(h3)
    #	c4 = classifier(h4)

    #Plot of linear case (dataset 1)
    plt.figure(1)

    x1list = np.linspace(0.0, 1.0, iterations_n)
    x1_append = np.ones((1, iterations_n))
    x1list_aux = np.array([x1list])
    x1list_vec = np.row_stack((x1_append, x1list_aux))

    b_line = boundary(w2, x1list_vec)
    plt.scatter(x[1, :],
                x[2, :],
                c=y,
                s=100,
                edgecolor='black',
                marker='o',
                label='Training')

    plt.scatter(x2[1, :],
                x2[2, :],
                c=y2,
                s=150,
                edgecolor='black',
                marker='*',
                label='Test')
    plt.plot(x1list, b_line[0, :], label='Decision boundary')
    plt.xlabel('x1')
    plt.ylabel('x2')

    l1_1 = mlines.Line2D([], [],
                         color='blue',
                         marker='o',
                         markersize=15,
                         label='Training (y = 0)')
    l1_2 = mlines.Line2D([], [],
                         color='red',
                         marker='o',
                         markersize=15,
                         label='Training (y = 1)')
    l1_3 = mlines.Line2D([], [],
                         color='blue',
                         marker='*',
                         markersize=15,
                         label='Test (y = 0)')
    l1_4 = mlines.Line2D([], [],
                         color='red',
                         marker='*',
                         markersize=15,
                         label='Test (y = 1)')
    plt.legend([l1_1, l1_2, l1_3, l1_4], [
        'Training (y = 0)', 'Training (y = 1)', 'Test (y = 0)', 'Test (y = 1)'
    ],
               loc=3,
               scatterpoints=1)

    #Plot of linear case (dataset 2)
    plt.figure(2)

    #Creating meshgrid to plot the nonlinear decision boundary (second part of task2)
    x2list = x1list
    xx1, xx2 = np.meshgrid(x1list, x2list)
    #Evaluating the decision boundary function in all the meshgrid points so we can plot it
    F = boundary_sqr(xx1, xx2, w4)
    F = F.reshape(xx1.shape)
    #Plot of the nonlinear decision boundary
    cp = plt.contour(xx1, xx2, F, colors='green', linestyles='dashed')
    plt.xlabel('x1')
    plt.ylabel('x2')

    #Plot of the classified points
    plt.scatter(x3[1, :], x3[2, :], c=y3, s=100, edgecolor='black', marker='o')
    plt.scatter(x4[1, :], x4[2, :], c=y4, s=150, edgecolor='black', marker='*')

    l1_1 = mlines.Line2D([], [],
                         color='blue',
                         marker='o',
                         markersize=15,
                         label='Training (y = 0)')
    l1_2 = mlines.Line2D([], [],
                         color='red',
                         marker='o',
                         markersize=15,
                         label='Training (y = 1)')
    l1_3 = mlines.Line2D([], [],
                         color='blue',
                         marker='*',
                         markersize=15,
                         label='Test (y = 0)')
    l1_4 = mlines.Line2D([], [],
                         color='red',
                         marker='*',
                         markersize=15,
                         label='Test (y = 1)')
    plt.legend([l1_1, l1_2, l1_3, l1_4], [
        'Training (y = 0)', 'Training (y = 1)', 'Test (y = 0)', 'Test (y = 1)'
    ],
               loc=3,
               scatterpoints=1)
    #=================================================================
    # PLOT OF ERROR model
    aux_x = np.linspace(1, iterations_n, iterations_n)
    plt.figure(3)
    plt.plot(aux_x, erro1[0, :], c='green', label="Training set")
    plt.plot(aux_x, erro1[1, :], c='black', label="Test set")
    plt.xlabel('# of iteration')
    plt.ylabel('Cross-entropy error')
    plt.legend(loc=3)

    plt.figure(4)
    plt.plot(aux_x, erro2[0, :], c='green', label='Training set')
    plt.plot(aux_x, erro2[1, :], c='black', label='Test set')
    plt.xlabel('# of iteration')
    plt.ylabel('Cross-entropy error')
    plt.legend(loc=3)

    plt.show()
Esempio n. 44
0
                            data=df,
                            ax=ax[i, j],
                            legend=False)
            ax[i, j].title.set_text('%.1f, %.1f' % (ay, az))

            m = pymalts.malts_mf('Y',
                                 'T',
                                 df,
                                 k_tr=5,
                                 k_est=20,
                                 estimator='linear',
                                 smooth_cate=True,
                                 reweight=False,
                                 n_splits=2,
                                 n_repeats=1)
            cate = m.CATE_df['avg.CATE']
            ate = cate.mean()
            s[i, j, seed] += (ate - 1)

s = np.mean(s, axis=2)
sns.set()
fig = plt.figure()
CS = plt.contour(ays, azs, s * 10 - 0.05, levels=50)
plt.clabel(CS, inline=1, fontsize=10)
plt.colorbar()
plt.xlabel('sensitivity parameter for Outcome')
plt.ylabel('sensitivity parameter for Treatment')
plt.tight_layout()
plt.axvline(0, c='r')
plt.axhline(0, c='r')
fig.savefig('Figures/sensitivity_analysis.png')
Esempio n. 45
0
                 + w[5,0] * y**2
                )
'''
PLOTTING LINEAR REGRESSION

We are going to plot the (x, y) in [-1, 1]^2 whose value after the
linear regression for classification is near 0.

That means they lie close to the decision boundary.
'''
error = 10**(-2.1)
space = np.linspace(-1,1,100)

z = [[ equation(i,j,w) for i in space] for j in space ]

plt.contour(space, space, z, levels=[0], colors='black', linewidths=2)

for l in labels:
	
	index = np.where(y == l)
	plt.scatter(training_sample[index, 0],
                    training_sample[index,1],
                    c=colors[l],
                    label=str(l))

plt.title('Quadratic regression fit')
plt.xlabel('$x_1$ value')
plt.ylabel('$x_2$ value')
plt.legend( loc = 'lower left')
plt.show()
Esempio n. 46
0
def plot(X, Y, clf, show=True, dataOnly=False):

    plt.figure()
    # plot data points
    X1 = X[Y == 0]
    X2 = X[Y == 1]
    Y1 = Y[Y == 0]
    Y2 = Y[Y == 1]
    class1 = plt.scatter(X1[:, 0],
                         X1[:, 1],
                         zorder=10,
                         cmap=plt.cm.Paired,
                         edgecolor='k',
                         s=20)
    class2 = plt.scatter(X2[:, 0],
                         X2[:, 1],
                         zorder=10,
                         cmap=plt.cm.Paired,
                         edgecolor='k',
                         s=20)
    if not dataOnly:
        # get the range of data
        x_min = X[:, 0].min()
        x_max = X[:, 0].max()
        y_min = X[:, 1].min()
        y_max = X[:, 1].max()

        # sample the data space
        XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j]

        # apply the model for each point
        Z = clf.decision_function(np.c_[XX.ravel(), YY.ravel()])
        Z = Z.reshape(XX.shape)

        # plot the partitioned space
        plt.pcolormesh(XX, YY, Z > 0, cmap=plt.cm.Paired)

        # plot hyperplanes
        plt.contour(XX,
                    YY,
                    Z,
                    colors=['k', 'k', 'k'],
                    linestyles=['--', '-', '--'],
                    levels=[-1, 0, 1],
                    alpha=0.5)

        # plot support vectors
        plt.scatter(clf.support_vectors_[:, 0],
                    clf.support_vectors_[:, 1],
                    edgecolors='g',
                    s=100,
                    linewidth=1)
    if dataOnly:
        plt.title('Data Set')
    else:
        if clf.kernel == 'rbf':
            plt.title(
                'Decision Boundary and Margins, C={}, gamma={} with {} kernel'.
                format(clf.C, clf.gamma, clf.kernel))
        elif clf.kernel == 'poly':
            plt.title(
                'Decision Boundary and Margins, C={}, degree={} with {} kernel'
                .format(clf.C, clf.degree, clf.kernel))
        else:
            plt.title(
                'Decision Boundary and Margins, C={} with {} kernel'.format(
                    clf.C, clf.kernel))

    plt.legend((class1, class2), ('Class A', 'Class B'),
               scatterpoints=1,
               loc='upper left',
               ncol=3,
               fontsize=8)
    if show:
        plt.show()
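
# Hedged usage sketch for plot(): train a scikit-learn SVC on a toy blob
# dataset and hand it to plot(); the data and kernel settings below are
# illustrative assumptions, not from the original source.
from sklearn import svm, datasets

X_demo, Y_demo = datasets.make_blobs(n_samples=100, centers=2, random_state=0)
clf_demo = svm.SVC(kernel='rbf', C=1.0, gamma='scale').fit(X_demo, Y_demo)
plot(X_demo, Y_demo, clf_demo)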
Esempio n. 47
0
# code chunk 4.14 (grid estimation)
mu_grid = np.linspace(140, 160, 200)
sigma_grid = np.linspace(4, 9, 200)
post_list = [sum(norm.logpdf(d2, m, s)) for m in mu_grid for s in sigma_grid]
post_ll = np.array(post_list)  # a plain array; np.concatenate fails on a list of scalars

mu_grid_rep = np.repeat(mu_grid, 200)
sigma_grid_rep = np.tile(sigma_grid, 200)
len(post_ll) == len(mu_grid_rep) and len(post_ll) == len(sigma_grid_rep)
post_log_prob = post_ll + norm.logpdf(mu_grid_rep, 178, 20) + uniform.logpdf(
    sigma_grid_rep, 0, 50)
post_prob = np.exp(post_log_prob - max(post_log_prob))
X, Y = np.meshgrid(mu_grid, sigma_grid)
Z = post_prob.reshape(200, 200).T  # transpose so rows vary with sigma, matching meshgrid
plt.contour(X, Y, Z)
plt.show()

post_prob_std = post_prob / sum(post_prob)
sample_rows = np.random.choice(range(len(post_prob)),
                               size=1000,
                               replace=True,
                               p=post_prob_std)
sample_mu = mu_grid_rep[sample_rows]
sample_sigma = sigma_grid_rep[sample_rows]
plt.scatter(sample_mu, sample_sigma)
plt.show()

sns.kdeplot(x=sample_mu, y=sample_sigma, color="b", linewidths=1)
plt.xlabel('mean')
plt.ylabel('sd')
Esempio n. 48
0
##			return 0
        return np.sin(x)

f(0, True)

X = [[[f(x) for x in slices]]]

 
# load json and create model
json_file = open('model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights("model.h5")
print("Loaded model from disk")
 
# evaluate loaded model on test data
loaded_model.compile(loss='mean_squared_error', optimizer='adam', metrics=['accuracy'])

y = loaded_model.predict(X)

A = np.reshape(y, (100,50))

plt.contour(A)
##
##plt.xticks([0,24,49], [0, 0.125, 0.25])
##plt.yticks([0, 49, 99], [0,format(np.pi/2, '.4f'),format(np.pi, '.4f')])

plt.show()
Esempio n. 49
0
def multi_site_iso_contour_circles(nc_grd,
                                   h_seed,
                                   ns,
                                   nqs,
                                   rs,
                                   fig_check_seed=True):
    '''
    SEED CIRCLES OF PARTICLES AT MULTIPLE SITES
    DEFINED AS POINTS ALONG AN ISOBATH
    
    h_seed --> isobath to take as contour line to seed particles along

    ns  --> number of sites along contour line
    
    nqs --> number of particles per each site

    rs --> radius in km of particles for each site
    '''
    [Ly, Lx] = nc_grd.variables['h'][:, :].shape
    # GET COORDINATES OF ISOBATH CONTOUR
    CD = plt.contour(nc_grd.variables['h'][:, :], [h_seed])
    len_cons = [
        len(CD.collections[0].get_paths()[i].vertices)
        for i in range(len(CD.collections[0].get_paths()))
    ]
    ind_max_len = np.asarray(len_cons).argmax()
    p = CD.collections[0].get_paths()[ind_max_len]
    x_iso = p.vertices[:, 0]
    y_iso = p.vertices[:, 1]

    #GET COORDINATES OF EACH SITE EVENLY DISTRIBUTED ALONG THE ISOBATH
    dpt = int(len(x_iso) / ns)
    #len_temp = len(x_iso[0::dpt])
    #diff_len = abs(len_temp - ns)
    #x_site = x_iso[::dpt][0:len_temp-diff_len]
    #y_site = y_iso[::dpt][0:len_temp-diff_len]

    #slicing below avoids asymmetry at lateral boundaries
    x_site = x_iso[dpt:-1:dpt]
    y_site = y_iso[dpt:-1:dpt]

    nsites = len(x_site)
    #print nsites
    nparticles = len(x_site) * nqs
    #print nparticles
    px_arr = np.zeros(nparticles, order='F')
    py_arr = np.zeros(nparticles, order='F')
    p_ind = 0
    for s in range(nsites):
        ##################################
        # FIND SITE CENTERS IN GRIDPOINTS
        ####################################
        r_grd = (rs * 1E3) * (np.mean(
            (nc_grd.variables['pm'][:, :] + nc_grd.variables['pn'][:, :]) / 2))

        ####################################
        # CREATE CIRCLE OF PARTICLES
        ####################################
        px_arr[p_ind:p_ind + nqs], py_arr[p_ind:p_ind + nqs] = part_circle(
            x_site[s], y_site[s], r_grd, nqs)
        p_ind += nqs

    buff = 3
    px_arr[px_arr >= Lx - 1] = Lx - buff
    py_arr[py_arr >= Ly - 1] = Ly - buff

    #REMOVE PARTICLES ON MASK
    px_arr, py_arr = nan_parts_on_mask(px_arr, py_arr, nc_grd)
    ####################################
    # SHOW USER FLOAT LOCATIONS ON GRID
    ####################################
    if fig_check_seed:
        mask = nc_grd.variables['mask_rho'][:, :]
        h = nc_grd.variables['h'][:, :].T
        mask_rho = np.copy(mask)
        mask_rho[mask_rho == 0] = np.nan
        plt.ion()
        fig_check = plt.figure(figsize=[24, 8])
        #h_con = plt.contour(h.T,colors='k',linewidths=2.5)
        plt.imshow(h.T * mask_rho, origin='lower', cmap=cm.jet)
        cbar_plot = plt.colorbar()
        cbar_plot.set_label(r'$h(m)$', fontsize=22)
        plt.plot(px_arr, py_arr, 'o', color='k', markersize=2.5)
        plt.title('INITIAL PARTICLE LOCATIONS')
        ax = plt.gca()
        ax.set_xlim([0, mask_rho.shape[1]])
        ax.set_ylim([0, mask_rho.shape[0]])
        move_on = input('Press enter to continue...')
        plt.ioff()
        plt.close('all')

    return px_arr, py_arr, x_site, y_site
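

# Hedged usage sketch; the grid file name and seeding parameters are
# illustrative assumptions, not values from the original source.
# nc_grd = Dataset('roms_grd.nc')
# px, py, x_site, y_site = multi_site_iso_contour_circles(
#     nc_grd, h_seed=100., ns=10, nqs=500, rs=2.)
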
def temp_data():
    months = range(871)[-68:-8]
    avgs = []
    levels = range(17)[0:12]
    for l in levels:
        item = []
        for m in months:
            for latitude in tempnc.variables['air'][m][l]:
                item.append(np.mean(latitude))
        item = slice_per(item, 73)
        for elem in item:
            average = np.mean(elem)
            avgs.append(average)
    avgs = np.array(slice_per(avgs, 73))
    return avgs.T


if __name__ == "__main__":
    temp = temp_data()
    zonal = zonal_data()
    x = tempnc.variables['lat'][:]
    levels = tempnc.variables['level'][0:12]
    levels = levels[::-1]
    plt.contourf(x, levels, zonal, cmap='nipy_spectral', alpha=0.7)
    plt.colorbar()
    plt.contour(x, levels, temp, colors='black', levels=15)
    plt.xlabel("Latitude(°)")
    plt.ylabel("Altitude(mbar)")
    plt.savefig("windVtemp.png")
    plt.show()
Esempio n. 51
0
    x_vals, y_vals = np.linspace(x_left, x_right, grid_dot_num_x_plotting), \
                     np.linspace(y_left, y_right, grid_dot_num_y_plotting)
    xx_grid, yy_grid = np.meshgrid(x_vals, y_vals)

    for product in range(product_number):
        plt.figure(product + 1, figsize=figsize)
        plt.title('Initial partition for product %i' % (product + 1))
        plt.grid(True)
        plt.axis([x_left, x_right, y_left, y_right])

        z = indicator(xx_grid, yy_grid, psi_initial, tau_initial, product)
        in_partition = np.unique(z)
        in_partition = in_partition[in_partition >= 0]
        cf = plt.contour(x_vals,
                         y_vals,
                         z,
                         levels=in_partition,
                         cmap=ListedColormap(['black']))
        plt.contour(x_vals,
                    y_vals,
                    density_vector[product](xx_grid, yy_grid),
                    levels=[0.0],
                    cmap=ListedColormap(['black']))

        plt.plot(tau_initial[0, in_partition], tau_initial[1, in_partition],
                 tau_style)
        for p in in_partition:
            plt.text(tau_initial[0, p] + tau_text_shift,
                     tau_initial[1, p] + tau_text_shift,
                     '%i' % (p + 1),
                     fontsize=fontsize,
Esempio n. 52
0
    d = np.sqrt((2 * np.pi)**n * covar_det)
    fac = np.einsum('...k,kl,...l->...', position - mean, covar_inv,
                    position - mean)
    return np.exp(-fac / 2) / d


"""-----------function end----------------------------"""
x_range = np.linspace(min(ts_data[:, 0]), max(ts_data[:, 0]), 1000)
y_range = np.linspace(min(ts_data[:, 1]), max(ts_data[:, 1]), 1000)
X, Y = np.meshgrid(x_range, y_range)

d = np.empty(X.shape + (2, ))

d[:, :, 0] = X
d[:, :, 1] = Y
Z0 = bi_normal(d, mean0, cov0)
plt.contour(X, Y, Z0)
Z1 = bi_normal(d, mean1, cov1)
plt.contour(X, Y, Z1)

Z = (Z1 - Z0)
plt.contour(X, Y, Z)

plt.title(
    'Iso-probability contour (case4 a1=3,b1=4,c1=2,d1=7/ a2=14,b2=10,c2=12,d2=16)'
)
plt.scatter(ts_data[:, 0], ts_data[:, 1])
plt.ylabel('x2')
plt.xlabel('x1')
plt.show()
Esempio n. 53
0
  print('no max gg:',np.max(gg0[n,:]))
  print('t0 max:',np.max(t0[n,:]*10./3.),'fs')
  plt.scatter(px0[n,t0[n,:]*10./3.<1000], py0[n,t0[n,:]*10./3.<1000], c=t0[n,t0[n,:]*10./3.<1000]*10./3., norm=colors.Normalize(vmin=0,vmax=1000), s=3, cmap='rainbow', edgecolors='None', alpha=1,zorder=3)
  #cbar=plt.colorbar( ticks=np.linspace(0, 500, 3) ,pad=0.005)
  #cbar.ax.set_yticklabels(cbar.ax.get_yticklabels(), fontsize=font_size)
  #cbar.set_label('$\gamma$',fontdict=font)
  px = np.linspace(-400,15400,501)
  py = np.linspace(-800,800,401)

  px, py = np.meshgrid(px, py)

  R = (1+px**2+py**2)**0.5-px
  R[R<0.1]=0.1
  levels = np.logspace(1,3,5)
  print(np.min(R),np.max(R))
  plt.contour(px, py, R, levels=levels, norm=mcolors.LogNorm(vmin=levels.min(), vmax=levels.max()), linestyles='dashed', cmap='gist_gray',zorder=0,linewidths=2.5)
  #plt.contourf(px, py, R, levels=levels, norm=mcolors.LogNorm(vmin=levels.min(), vmax=levels.max()), cmap=mycolor_jet)
  #cbar=plt.colorbar(pad=0.1, ticks=np.logspace(0,3,7))#,orientation="horizontal")
  #cbar.ax.set_yticklabels(cbar.ax.get_yticklabels(), fontsize=font_size)
  #cbar.set_label('R', fontdict=font)

   
#  plt.scatter(px0[condition], py0[condition], c=t0[condition], norm=colors.Normalize(vmin=0,vmax=400), s=20, cmap='nipy_spectral', marker=(5, 1), edgecolors='None', alpha=1)
#  plt.scatter(px1[condition], py1[condition], c=t1[condition], norm=colors.Normalize(vmin=0,vmax=400), s=20, cmap='winter', edgecolors='None', alpha=1)
#  cbar=plt.colorbar( ticks=np.linspace(0, 2000, 5) ,pad=0.005)
#  cbar.ax.set_yticklabels(cbar.ax.get_yticklabels(), fontsize=20)
#  cbar.set_label('$\gamma$',fontdict=font)


  plt.xlabel('$p_x$ [$m_ec$]',fontdict=font)
  plt.ylabel('$p_y$ [$m_ec$]',fontdict=font)
Esempio n. 54
0
def radial_feature_seed(nc_grd,
                        nc_out,
                        nc_tind,
                        field_look='div',
                        radial_inputs=[]):
    """
    SEED PARTICLES RADIALLY AROUND A 
    FEATURE (i.e., front or filament)

    USES GUI-INTERACTIVE PLOTTING WHERE USER
    CLICKS ON FEATURE AND INPUTS RADIUS OF PARTICLE
    CIRCLE AND NUMBER OF PARTICLES TO SEED WITHIN
    THAT RADIAL DISTRIBUTION
    
   
    nc_grd --> NETCDF object of grid-file

    nc_out --> NETCDF object of file to use to look at
	       2D map of fields to place particles
 
    nc_tind --> time-step w/in netcdf file to look at fields (ZERO-BASED INDEXING)

    field_look --> 'temp', 'salt', 'div', 'vort'
                    field to look at to place particles
		    (default is surface divergence), all other
		    options are for surface fields



    radial_inputs---> list containing 2 components
                      [r_km, nparticles]
		        radius, number of particles

			default is [] (empty); in default
			mode, the user will be prompted to enter
			these values
			
    """

    #############################################
    # 		LOAD GRID DATA
    ##############################################
    mask = nc_grd.variables['mask_rho'][:, :]
    mask_rho = np.copy(mask)
    mask_rho[mask_rho == 0] = np.nan
    pm = nc_grd.variables['pm'][:, :]
    pn = nc_grd.variables['pn'][:, :]
    h = nc_grd.variables['h'][:, :]
    f = nc_grd.variables['f'][:, :]

    #############################################
    # LOAD OUTPUT DATA TO VISUALIZE FOR SEEDING
    #############################################
    if field_look == 'div':
        try:
            div = calc_div(nc_out.variables['u'][nc_tind, -1, :, :],
                           nc_out.variables['v'][nc_tind, -1, :, :], pm, pn, f)
        except:
            div = calc_div(nc_out.variables['ubar'][nc_tind, :, :],
                           nc_out.variables['vbar'][nc_tind, :, :], pm, pn, f)

        field_plot = (div * mask_rho) / f
        cmap_plot = cm.seismic
        cbar_label = r'$\delta / f$'
        min_max = [-5, 5]

    if field_look == 'vort':
        try:
            vort = calc_vort(nc_out.variables['u'][nc_tind, -1, :, :],
                             nc_out.variables['v'][nc_tind,
                                                   -1, :, :], pm, pn, f)
        except:
            vort = calc_vort(nc_out.variables['ubar'][nc_tind, :, :],
                             nc_out.variables['vbar'][nc_tind, :, :], pm, pn,
                             f)

        field_plot = (psi2rho(vort) * mask_rho) / f
        cmap_plot = cm.seismic
        cbar_label = r'$\zeta / f$'
        min_max = [-10, 10]

    if field_look == 'temp':
        field_plot = nc_out.variables['temp'][nc_tind, -1, :, :] * mask_rho
        cmap_plot = cm.rainbow
        cbar_label = r'$SST \left(^{\circ} C\right)$'
        min_max = [np.min(field_plot), np.max(field_plot)]

    if field_look == 'salt':
        field_plot = nc_out.variables['salt'][nc_tind, -1, :, :] * mask_rho
        cmap_plot = cm.rainbow
        cbar_label = r'$SSS$'
        min_max = [np.min(field_plot), np.max(field_plot)]

    ##############################################
    # PLOT AND PROMPT USER TO CLICK ON PLOT
    '''
    ALL OF THIS HAPPENS IN WHILE LOOP
    UNTIL USER IS SATISFIED WITH PARTICLE DISTRIBUTION
    SO USER CAN RE-SELECT FEATURE, RADIUS, AND # OF PARTICLES
    UNTIL THEY HAVE DISTRIBUTION THEY WANT
    '''
    ##############################################
    choose_again = 'Y'
    while choose_again == 'Y':
        plt.ion()
        plt.figure(figsize=[14, 7])
        h_con = plt.contour(h, colors='k', linewidths=2.5)
        plt.imshow(field_plot,
                   origin='lower',
                   cmap=cmap_plot,
                   vmin=min_max[0],
                   vmax=min_max[1])
        cbar_plot = plt.colorbar()
        cbar_plot.set_label(cbar_label, fontsize=22)
        plt.title('INSTRUCTIONS FOR CLICKING: ' + '\n' +
                  'Select point: left click' + '\n' +
                  'Cancel last input: right click' + '\n' +
                  'Stop click recording: middle click')
        coords = plt.ginput(n=-1,
                            show_clicks=True,
                            mouse_add=1,
                            mouse_pop=3,
                            mouse_stop=2)

        #######################################################
        # ASK USER HOW MANY PARTICLES AND WHAT RADIUS THEY WANT
        ########################################################
        if radial_inputs == []:
            print('################################################')
            print()
            print()
            print()
            r_km = float(input('Radius of circle (in km)?  '))
            print()
            nparticles = int(input('How many total particles?  '))

            print('##################################################')
        #CONVERT RADIUS TO GRID-POINT UNITS

        else:
            r_km = radial_inputs[0]
            nparticles = radial_inputs[1]

        r_grdpt = (r_km * 1E3) * np.mean((pm + pn) / 2)

        ###############################################
        # DISTRIBUTE PARTICLES INSIDE CIRCLE
        ################################################
        x_center = coords[0][0]
        y_center = coords[0][1]

        px_list = []
        py_list = []
        for n in range(nparticles):
            q = np.random.random() * 2 * np.pi
            r = np.sqrt(np.random.random())
            px_list.append(x_center + (r_grdpt * r) * np.cos(q))
            py_list.append(y_center + (r_grdpt * r) * np.sin(q))

        px_arr = np.asarray(px_list, dtype='float64')
        py_arr = np.asarray(py_list, dtype='float64')

        ####################################
        # SHOW USER FLOAT LOCATIONS ON GRID
        ####################################
        fig_check = plt.figure(figsize=[14, 7])
        h_con = plt.contour(h, colors='k', linewidths=2.5)
        plt.imshow(field_plot,
                   origin='lower',
                   cmap=cmap_plot,
                   vmin=min_max[0],
                   vmax=min_max[1])
        cbar_plot = plt.colorbar()
        cbar_plot.set_label(cbar_label, fontsize=22)
        plt.plot(px_arr, py_arr, 'o', color='k', markersize=2.5)
        ax = plt.gca()
        ax.set_xlim([0, mask_rho.shape[1]])
        ax.set_ylim([0, mask_rho.shape[0]])

        ##########################################
        # PROMPT USER TO ENTER INPUTS AGAIN
        ##########################################
        user_in_bad = True
        while user_in_bad:
            temp_in = input(
                'Choose feature / particle distribution again (Y/N)? ')
            if temp_in == 'Y' or temp_in == 'N':
                choose_again = temp_in
                user_in_bad = False
        plt.close('all')
##########################################################################################

    return px_arr - 1, py_arr - 1
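
# Hedged usage sketch; the file names and inputs are illustrative assumptions,
# not values from the original source.
# nc_grd = Dataset('roms_grd.nc')
# nc_out = Dataset('roms_his.nc')
# px, py = radial_feature_seed(nc_grd, nc_out, nc_tind=0, field_look='vort',
#                              radial_inputs=[5., 2000])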
Esempio n. 55
0
def main(args):
    # Create a random generator with a given seed
    generator = np.random.RandomState(args.seed)

    # Generate an artificial classification dataset
    data, target = sklearn.datasets.make_classification(
        n_samples=args.data_size, n_features=2, n_informative=2, n_redundant=0, random_state=args.seed)

    # Use `sklearn.model_selection.train_test_split` method call, passing
    # arguments `test_size=args.test_size, random_state=args.seed`.

    # Append a constant feature with value 1 to the end of every input data
    data = np.concatenate([data, np.ones([data.shape[0], 1])], axis=1)
    # Split the dataset into a train set and a test set.
    train_data, test_data, train_target, test_target = sklearn.model_selection.train_test_split(data, target,
                                                                                                test_size=args.test_size,
                                                                                                random_state=args.seed)

    # Generate initial linear regression weights
    weights = generator.uniform(size=train_data.shape[1])

    for iteration in range(args.iterations):
        permutation = generator.permutation(train_data.shape[0])

        # Process the data in the order of `permutation`.
        # You can assume that `args.batch_size` exactly divides `train_data.shape[0]`.
        gradient, gradient_components = 0, 0
        for i in permutation:
            gradient += (1 / (1 + np.exp(-train_data[i] @ weights)) - train_target[i]) * train_data[i]
            gradient_components += 1
            if gradient_components == args.batch_size:
                weights -= args.learning_rate * gradient / gradient_components
                gradient, gradient_components = 0, 0
        assert gradient_components == 0

        # After the SGD iteration, measure the average loss and accuracy for both the
        # train set and the test set. The loss is the average MLE loss (i.e., the
        # negative log likelihood, or crossentropy loss, or KL loss) per example.

        train_prob = 1 / (1 + np.exp(-train_data @ weights))
        train_pred = np.round(train_prob).astype(int)
        train_accuracy = sum(train_pred == train_target) / train_data.shape[0]
        train_loss = sklearn.metrics.log_loss(train_target, train_prob)

        test_prob = 1 / (1 + np.exp(-test_data @ weights))
        test_pred = np.round(test_prob).astype(int)
        test_accuracy = sum(test_pred == test_target) / test_data.shape[0]
        test_loss = sklearn.metrics.log_loss(test_target, test_prob)
        print("After iteration {}: train loss {:.4f} acc {:.1f}%, test loss {:.4f} acc {:.1f}%".format(
            iteration + 1, train_loss, 100 * train_accuracy, test_loss, 100 * test_accuracy))

        if args.plot:
            import matplotlib.pyplot as plt
            if args.plot is not True:
                if not iteration: plt.figure(figsize=(6.4 * 3, 4.8 * (args.iterations + 2) // 3))
                plt.subplot(3, (args.iterations + 2) // 3, 1 + iteration)
            xs = np.linspace(np.min(data[:, 0]), np.max(data[:, 0]), 50)
            ys = np.linspace(np.min(data[:, 1]), np.max(data[:, 1]), 50)
            predictions = [[1 / (1 + np.exp(-([x, y, 1] @ weights))) for x in xs] for y in ys]
            plt.contourf(xs, ys, predictions, levels=21, cmap=plt.cm.RdBu, alpha=0.7)
            plt.contour(xs, ys, predictions, levels=[0.25, 0.5, 0.75], colors="k")
            plt.scatter(train_data[:, 0], train_data[:, 1], c=train_target, marker="P", label="train", cmap=plt.cm.RdBu)
            plt.scatter(test_data[:, 0], test_data[:, 1], c=test_target, label="test", cmap=plt.cm.RdBu)
            plt.legend(loc="upper right")
            if args.plot is True:
                plt.show()
            else:
                plt.savefig(args.plot, transparent=True, bbox_inches="tight")

    return weights
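
# Hedged sketch of the argument namespace main() expects; the flags are read
# from the code above and the default values are illustrative assumptions.
# import argparse
# parser = argparse.ArgumentParser()
# parser.add_argument("--batch_size", default=10, type=int)
# parser.add_argument("--data_size", default=100, type=int)
# parser.add_argument("--iterations", default=10, type=int)
# parser.add_argument("--learning_rate", default=0.5, type=float)
# parser.add_argument("--plot", default=False, const=True, nargs="?")
# parser.add_argument("--seed", default=42, type=int)
# parser.add_argument("--test_size", default=0.5, type=float)
# main(parser.parse_args())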
Esempio n. 56
0
    mantle = int(np.min(np.argwhere((pdist[1] + pdist[0] < r))))
    lower_crust = int(np.min(np.argwhere(
        (pdist[2] + pdist[1] + pdist[0] < r))))
    upper_crust = lower_crust + 1
    record = 20
    x = r / 1000
    y = t / 1e6

    fig = plt.figure(figsize=(10, 10))
    X, Y = np.meshgrid(x,
                       y)  # `plot_surface` expects `x` and `y` data to be 2D
    surf = plt.contourf(Y, X, T, 15)
    CS = plt.contour(Y,
                     X,
                     T,
                     levels=[1650, 1750],
                     colors=('k', ),
                     linestyles=('-', ),
                     linewidths=(2, ))
    plt.clabel(CS, fmt='%2.1d', colors='k', fontsize=14)
    plt.hlines(r[core] / 1000, y[record], np.max(Y), color='red')
    plt.hlines(r[mantle] / 1000, y[record], np.max(Y), color='red')
    plt.axvline(y[record], 0, 500, color='red')
    plt.title('2-zone model,Canonical Al & Fe, accretion at ' +
              str(t[0] / 1e6))
    # if case == 2:
    #     plt.title('4-zone model,Canonical Al & Fe, accretion at '+ str(t[0]/1e6))
    plt.xlabel('Time (Myr)')
    plt.ylabel('Radius (km)')
    cbar = fig.colorbar(surf, label='Temperature (K)')
Esempio n. 57
0
def variable_sweep(problem):
    from matplotlib import rcParams
    rcParams['font.family'] = 'times new roman'
    # rcParams['font.times-new-roman'] = ['times new roman']

    number_of_points = 5
    outputs = carpet_plot(problem, number_of_points, 0,
                          0)  #run carpet plot, suppressing default plots
    inputs = outputs.inputs
    objective = outputs.objective
    constraints = outputs.constraint_val
    plt.figure(0)
    CS = plt.contourf(inputs[0, :], inputs[1, :], objective, 20, linewidths=2)
    cbar = plt.colorbar(CS)

    cbar.ax.set_ylabel('Fuel Burn (kg)')
    CS_const = plt.contour(inputs[0, :],
                           inputs[1, :],
                           constraints[-1, :, :],
                           cmap=plt.get_cmap('hot'))
    plt.clabel(CS_const, inline=1, fontsize=12, family='times new roman')
    cbar = plt.colorbar(CS_const)
    # plt.FontProperties(family='times new roman', style='italic', size=12)
    cbar.ax.set_ylabel('BOW (kg)')
    # font = matplotlib.font_manager.FontProperties(family='times new roman', style='italic', size=12)

    # CS_const.font_manager.FontProperties.set_family(family='times new roman')

    plt.xlabel('Wing Area (m^2)')
    plt.ylabel('Aspect Ratio (-)')

    plt.legend(loc='upper left')
    # plt.show(block=True)
    plt.show()

    number_of_points = 5
    outputs = carpet_plot(
        problem, number_of_points, 0, 0, sweep_index_0=1,
        sweep_index_1=3)  # run carpet plot, suppressing default plots
    inputs = outputs.inputs
    objective = outputs.objective
    constraints = outputs.constraint_val
    plt.figure(0)
    CS = plt.contourf(inputs[0, :], inputs[1, :], objective, 20, linewidths=2)
    cbar = plt.colorbar(CS)

    cbar.ax.set_ylabel('Fuel Burn (kg)')
    CS_const = plt.contour(inputs[0, :],
                           inputs[1, :],
                           constraints[-1, :, :],
                           cmap=plt.get_cmap('hot'))
    plt.clabel(CS_const, inline=1, fontsize=10)
    cbar = plt.colorbar(CS_const)
    cbar.ax.set_ylabel('BOW (kg)')

    plt.xlabel('AR (-)')
    plt.ylabel('Sweep Angle (Deg)')

    plt.legend(loc='upper left')
    plt.show()

    number_of_points = 5
    outputs = carpet_plot(
        problem, number_of_points, 0, 0, sweep_index_0=2,
        sweep_index_1=3)  # run carpet plot, suppressing default plots
    inputs = outputs.inputs
    objective = outputs.objective
    constraints = outputs.constraint_val
    plt.figure(0)
    CS = plt.contourf(inputs[0, :], inputs[1, :], objective, 20, linewidths=2)
    cbar = plt.colorbar(CS)

    cbar.ax.set_ylabel('Fuel Burn (kg)')
    CS_const = plt.contour(inputs[0, :],
                           inputs[1, :],
                           constraints[-1, :, :],
                           cmap=plt.get_cmap('hot'))
    plt.clabel(CS_const, inline=1, fontsize=10)
    cbar = plt.colorbar(CS_const)
    cbar.ax.set_ylabel('BOW (kg)')

    plt.xlabel('t/c (-)')
    plt.ylabel('Sweep Angle (Deg)')

    plt.legend(loc='upper left')
    plt.show(block=True)

    return
Esempio n. 58
0
plt.subplot(132)
plt.imshow(U[groups.index(gr), :].reshape((-1, 1)),
           cmap=newcmp,
           vmin=-0.4,
           vmax=0.4)
plt.grid(None)
plt.colorbar()
plt.yticks(range(10), components_names)
plt.xticks([])
plt.title("Encoding")

plt.subplot(133)
plt.imshow(sz, extent=(*x_range, *z_range[::-1]), alpha=sz.astype(float))
plt.contour(grid_x,
            grid_y,
            szr,
            extent=(*x_range, *z_range[::-1]),
            levels=[0.5])
plt.plot(*batter_outline(x=-0.8),
         scalex=False,
         scaley=False,
         color="white",
         linewidth=4)
plt.plot(*strike_zone(),
         scalex=False,
         scaley=False,
         color="white",
         linewidth=1,
         linestyle="--")
plt.gca().set_xticklabels([])
plt.gca().set_yticklabels([])
Esempio n. 59
0
import math

import numpy as np
import matplotlib.pyplot as plt


def rosen(x_o=(-1.5, 1.0), a=20.0, eta=0.001, threshold=0.001, maxiter=1000,
          alpha=0.0, anim=1, up=1.1, down=0.9, reduce=0.5):
    x_o = list(x_o)  # copy so the (immutable) default is never mutated across calls
    it = 0
    x1 = np.linspace(-2,2,201)
    
    x2 = np.linspace(-1,3,201)
    
    [X,Y] = np.meshgrid(x1,x2)
    
    Y = (1-X)**2 + a*(Y-X**2)**2
    
    v = np.linspace(math.floor(a/80)+3,Y.max(),math.floor(a))

    plt.clf()      
    plt.contour(Y,v)
    plt.xticks([0,50,100,150,200],[-2, -1, 0, 1, 2])
    plt.yticks([0,50,100,150,200],[-1, 0, 1, 2, 3])
    ax = plt.gca()
    ax.set_aspect('equal','box')
    
    plt.tight_layout()
    
    plt.plot(150,100,'b.')
    
    f = (1-x_o[0])**2+a*(x_o[1]-x_o[0]**2)**2
    fold = f
    minf = f

    gradold = np.array([0,0])
    
    eta1 = eta
    eta2 = eta
    
    varx = np.array([0.0,0.0])
    ###Gradient Method####
    while it != maxiter:
    
        grad = np.array([-2.0*(1-x_o[0])-4.0*a*(x_o[1]-x_o[0]**2)*x_o[0], 2.0*a*(x_o[1]-x_o[0]**2)])
        
        x_old = np.asarray(x_o)
 
        if (f>minf and reduce < 1):
            x_o[0] = minx1
            x_o[1] = minx2
            
            grad[0] = mingrad1
            grad[1]= mingrad2
            
            varx = np.array([0.0,0.0])
            
            eta1 = eta1*reduce
            eta2 = eta2*reduce
            
            gradold[0] = 0
            gradold[1] = 0
            
            fold = f
            f = minf
        else:
            minf = f
            
            minx1 = x_o[0]
            minx2 = x_o[1]
            
            mingrad1 = grad[0]
            mingrad2 = grad[1]
            
            if grad[0]*gradold[0] >0:
                eta1 = eta1*up
            else:
                eta1= eta1*down
            
            if grad[1]*gradold[1] >0:
                eta2 = eta2*up
            else:
                eta2 = eta2*down
                
            varx[0] = alpha*varx[0]-(1-alpha)*grad[0]
            varx[1] = alpha*varx[1]-(1-alpha)*grad[1]
            
            x_o[0] = x_o[0] + eta1*varx[0]
            x_o[1] = x_o[1] + eta2*varx[1]
            
            gradold = grad
            fold = f
 
        try:
            f = (x_o[0]-1)**2 + a*(x_o[1]-x_o[0]**2)**2
            if (f < threshold or fold < threshold):
                break
            else:
                if anim:
                    plt.plot([50*x_old[0]+100, 50*x_o[0]+100],[50*x_old[1]+50,50*x_o[1]+50],'r.-')
                    plt.xticks([0,50,100,150,200],[-2, -1, 0, 1, 2])
                    plt.yticks([0,50,100,150,200],[-1, 0, 1, 2, 3])
                    ax = plt.gca()
                    ax.set_aspect('equal','box')
                    plt.tight_layout()
                    plt.pause(0.1)
                it += 1
        except:
            print('Diverged!')
            plt.show()
            break
        
    if it == maxiter:
        print('Did not converge in %d steps, f = %f' %(it,f))
        plt.show()
        return x_o
    else:
        print('Converged in %d steps, f = %f' %(it+1,f))
        plt.show()
        return x_o
def contourPlot(DI,
                varName,
                levels,
                ticks,
                figName,
                ZvarName='mixf',
                addZstContour=False,
                means_rms='means'):

    print("\nMaking contour plot for figure: ", figName, "\n")

    fname = DI['pdir'] + means_rms + "_" + varName + ".dat"

    data = np.loadtxt(fname)
    x = data[:, 0]
    #x     = x-np.average(x)
    var = data[:, 1:]

    y = get_axialLocations(DI, forceGetDumpTimes=False)
    D = get_inputFileParameter(DI, ("initParams", "d_f"))
    x = x / D
    y = y / D

    X, Y = np.meshgrid(x, y)

    #------------------ plot

    aspectRatio = (np.max(y) - np.min(y)) / (np.max(x) - np.min(x))
    plt.figure(figsize=(4, 2.2 * aspectRatio))
    plt.rc("font", size=16)

    if levels != []:
        C = plt.contourf(X,
                         Y,
                         var.T,
                         levels=levels,
                         extend='max',
                         cmap=cm.viridis)
    else:
        C = plt.contourf(X, Y, var.T, 50, cmap=cm.viridis)

    #C.cmap.set_over('k')     # color for high out of range values
    #C.cmap.set_under('k')    # color for low out of range values
    ax = plt.gca()
    ax.axis((np.min(x), np.max(x), np.min(y), np.max(y)))
    ax.set_xbound(lower=np.min(x), upper=np.max(x))
    ax.set_ybound(lower=np.min(y), upper=np.max(y))
    ax.set_aspect('equal')
    plt.xlabel('radial position (x/D)', fontsize=16)
    plt.ylabel('axial position (y/D)', fontsize=16)
    plt.colorbar(ticks=ticks)
    #plt.xticks((-0.2,0,0.2))
    #plt.yticks(())
    #ax.grid(True, color='w', linewidth=1, linestyle = ':')

    if addZstContour:
        zstoic = get_fstoic(DI)
        Z = np.loadtxt(DI['pdir'] + "means_" + ZvarName + '.dat')
        Z = Z[:, 1:]
        plt.contour(X,
                    Y,
                    Z.T,
                    levels=np.array([zstoic]),
                    colors='white',
                    linewidths=0.1)

    #plt.show()
    plt.savefig(figName)
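
# Hedged usage sketch; the DI dictionary keys follow how contourPlot reads
# them above, and all values are illustrative assumptions.
# DI = {'pdir': 'post/'}
# contourPlot(DI, 'temp', levels=np.linspace(300, 2200, 50),
#             ticks=[300, 1000, 2000], figName='temp_contour.png',
#             addZstContour=True)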