Example #1
	def get_top_mas(self, list_of_mas, list_of_gene_ids, threshold, outfname):
		"""
		05-09-05
			start
		06-07-05
			if top_percentage is less than 200, use 200.
		"""
		sys.stderr.write("Getting the std/mean >=%s edges..."%threshold)
		writer = csv.writer(open(outfname, 'w'), delimiter='\t')
		print len(list_of_mas), len(list_of_gene_ids)
		for i in range(len(list_of_mas)):
			ma = list_of_mas[i]
			std = MLab.std(ma.compressed())	#disregard the NAs
			value = std/MLab.mean(ma.compressed())
			if self.debug:
				print list_of_gene_ids[i]
				print ma
				print std
				print value
				raw_input("y/n")
			if value>=threshold:
				ls_with_NA_filled = self.ls_NA_fillin(ma)
				writer.writerow([list_of_gene_ids[i]]+ls_with_NA_filled)
		del writer
		sys.stderr.write("Done.\n")
Example #2
 def menu_do_fit(self):
    """ Private method """
    # Compute 2-theta for each ROI
    for i in range(self.nrois):
       if (self.roi[i].energy == 0) or (self.roi[i].d_spacing == 0):
          self.two_theta[i] = 0.
       else:
          self.two_theta[i] = 2.0 * math.asin(12.398 / 
             (2.0*self.roi[i].energy*self.roi[i].d_spacing))*180./math.pi
       self.widgets.two_theta[i].configure(text=('%.5f' % self.two_theta[i]))
    # Find which ROIs should be used for the calibration
    use = []
    for i in range(self.nrois):
       if (self.roi[i].use): use.append(i)
    nuse = len(use)
    if (nuse < 1):
       tkMessageBox.showerror(title='mcaCalibateEnergy Error', 
          message='Must have at least one valid point for calibration')
       return
    two_theta=[]
    for u in use:
       two_theta.append(self.two_theta[u])
    self.calibration.two_theta = MLab.mean(two_theta)
    sdev = MLab.std(two_theta)
    self.widgets.two_theta_fit.setentry(
                               ('%.5f' % self.calibration.two_theta)
                               + ' +- ' + ('%.5f' % sdev))
    for i in range(self.nrois):
       two_theta_diff = self.two_theta[i] - self.calibration.two_theta
       self.widgets.two_theta_diff[i].configure(text=('%.5f' % two_theta_diff))
    self.mca.set_calibration(self.calibration)
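The expression above is Bragg's law solved for the scattering angle, with 12.398 keV*Angstrom for hc. A quick worked check with illustrative numbers (not from the source):

import math

energy, d_spacing = 10.0, 2.0    # keV, Angstroms
two_theta = 2.0 * math.asin(12.398 / (2.0 * energy * d_spacing)) * 180. / math.pi
print two_theta                  # about 36.1 degrees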
Example #3
def view(arr,title="",autolevel=1):
    "show Numeric array"
    global root,saveimgs

    if not title:
        # automatic titles are the line of code used to call view()
        caller=sys._getframe(1)
        lines,startnum=inspect.getsourcelines(caller)
        title=lines[caller.f_lineno-startnum]

    # autoscale brightness
    arr=abs(arr) # take magnitude of complex values
    if autolevel:
        maxval=MLab.max(MLab.max(arr)) # find maximum
        if maxval>0:
            arr=arr*255/maxval # scale so 255 is the max
    arr=arr.astype(Numeric.UnsignedInt8)

    size=arr.shape[1],arr.shape[0]
    im=Image.fromstring("L",size,arr.tostring())
  
    #im=im.resize((500,500))
    
    image = ImageTk.PhotoImage(im)
    saveimgs.append(image)

    f=tk.Frame(root, relief='raised',bd=2)
    tk.Label(f,text=title).pack(side='bottom')
    x = tk.Label(f, image=image)
    x.pack(side='top')
    f.pack(side='left')
Example #4
 def menu_do_fit(self):
     """ Private method """
     # Compute 2-theta for each ROI
     for i in range(self.nrois):
         if (self.roi[i].energy == 0) or (self.roi[i].d_spacing == 0):
             self.two_theta[i] = 0.
         else:
             self.two_theta[i] = 2.0 * math.asin(
                 12.398 / (2.0 * self.roi[i].energy *
                           self.roi[i].d_spacing)) * 180. / math.pi
         self.widgets.two_theta[i].configure(text=('%.5f' %
                                                   self.two_theta[i]))
     # Find which ROIs should be used for the calibration
     use = []
     for i in range(self.nrois):
         if (self.roi[i].use): use.append(i)
     nuse = len(use)
     if (nuse < 1):
         tkMessageBox.showerror(
             title='mcaCalibateEnergy Error',
             message='Must have at least one valid point for calibration')
         return
     two_theta = []
     for u in use:
         two_theta.append(self.two_theta[u])
     self.calibration.two_theta = MLab.mean(two_theta)
     sdev = MLab.std(two_theta)
     self.widgets.two_theta_fit.setentry(('%.5f' %
                                          self.calibration.two_theta) +
                                         ' +- ' + ('%.5f' % sdev))
     for i in range(self.nrois):
         two_theta_diff = self.two_theta[i] - self.calibration.two_theta
         self.widgets.two_theta_diff[i].configure(text=('%.5f' %
                                                        two_theta_diff))
     self.mca.set_calibration(self.calibration)
Example #5
def plot_matrix(z, r_x=0, r_y=0, filename=0, colorcode=0):

    if filename:
        print "Saving to file not supported."

    import MLab, Tkinter, ImageTk
        
    # Scale z and find appropriate colormap.
    
    zmax = MLab.max(MLab.max(z))
    zmin = MLab.min(MLab.min(z))

    if (zmin < 0) and (0 < zmax) :
        colormap = create_bipolar_colormap()
        zmax = MLab.max([-zmin, zmax])
        z += zmax
        z *= (len(colormap)-1)/(2*zmax)
    else:
        if colorcode == whiteblack:
            colormap = create_white_black_colormap()
        elif colorcode == blackwhite:
            colormap = create_black_white_colormap()    
        else:
            colormap = create_unipolar_colormap()

        z -= zmin
        if (zmax != zmin):
            z *= (len(colormap)-1)/(zmax-zmin)
            
    # Put picture on canvas.

    root = Tkinter.Tk()  
    pic = _plot_scaled_matrix(root, colormap, z, r_x, r_y)
    pic.pack()
    root.mainloop()
Example #6
 def MLab_minmax(self, x):
     """Use MLab's min/max functions repeatedly."""
     import MLab
     xmin = x; xmax = x
     for i in iseq(len(x.shape)-1,0,-1):
         xmin = MLab.min(xmin, i)
         xmax = MLab.max(xmax, i)
     return xmin, xmax
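iseq is not defined in this snippet; it is presumably an inclusive descending-range helper (as in scitools), so for a 2-D array the loop reduces axis 1 and then axis 0. An equivalent standalone sketch, assuming Numeric/MLab are importable:

import Numeric
import MLab

x = Numeric.array([[1, 5], [3, 2]])
xmin, xmax = x, x
for i in (1, 0):                 # last axis first, then axis 0
    xmin = MLab.min(xmin, i)
    xmax = MLab.max(xmax, i)
print xmin, xmax                 # 1 5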
Example #7
	def transform_one_file(self, src_pathname, delimiter, outputdir, b_instance, threshold, no_of_valids, top_percentage, \
		take_log, divide_mean):
		"""
		08-09-05
			add type
		08-29-05
			add no_of_valids to cut genes with too few valid values
		12-22-05
			add top_percentage
			change log(x,2) to log(x) (natural number is base)
		"""
		reader = csv.reader(file(src_pathname), delimiter=delimiter)
		#1st round to read
		counter = 0
		std_counter_ls = []
		for row in reader:
			counter += 1
			gene_id = row[0]
			ma_array = self.get_ma_array_out_of_list(row[1:], take_log, round_one=1)	#12-22-05
			"""
			if self.debug:
				print "The data vector is ",ma_array
				print "Its mask is ", ma_array.mask()
			"""
			if len(ma_array.compressed())>=no_of_valids:	#at least two samples, otherwise, correlation can't be calculated
				#08-29-05	no_of_valids controls not too many NA's, which is for graph_modeling
				std = MLab.std(ma_array.compressed())	#disregard the NAs
				if divide_mean:
					ratio = std/MLab.mean(ma_array.compressed())
				else:
					ratio = std
				"""
				if self.debug:
					print "std is ",std
					print "ratio is ", ratio
					raw_input("Continue?(Y/n)")
				"""
				std_counter_ls.append([ratio, counter])
		del reader
		
		qualified_counter_set = self.get_qualified_counter_set(std_counter_ls, top_percentage)
		
		#2nd round to read,  and write out
		reader = csv.reader(file(src_pathname), delimiter=delimiter)
		filename = os.path.basename(src_pathname)
		output_filename = os.path.join(outputdir, filename)
		writer = csv.writer(open(output_filename, 'w'), delimiter=delimiter)
		counter = 0
		for row in reader:
			counter += 1
			if counter in qualified_counter_set:
				gene_id = row[0]
				ma_array = self.get_ma_array_out_of_list(row[1:], take_log)
				writer.writerow([gene_id] + b_instance.ls_NA_fillin(ma_array))
		del reader, writer
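get_qualified_counter_set is not shown on this page; a plausible sketch, assuming it returns the counters of the rows whose ratio falls in the top_percentage fraction, mirroring the top-fraction logic of get_top_mas in Example #11:

def get_qualified_counter_set(std_counter_ls, top_percentage):
    std_counter_ls.sort()            # ascending by ratio
    std_counter_ls.reverse()         # descending
    top_number = int(len(std_counter_ls) * top_percentage)
    return set([counter for ratio, counter in std_counter_ls[:top_number]])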
Example #8
def plot2d(arr, size=(700, 600), palette=gray, dontscale=False):

    if dontscale:
        im = pilutil.toimage(MLab.flipud(arr), pal=palette, cmin=0., cmax=1.)
    else:
        im = pilutil.toimage(MLab.flipud(arr), pal=palette)

    if size:
        im = im.resize(size)

    im.show()
Example #9
 def plain_simulate(self, no_of_monte_carlos, no_of_samplings, debug, report):
     result_ls = []
     for j in range(no_of_monte_carlos):
         result = 0
         for i in range(no_of_samplings):
             x = random.random()
             result += math.sin(1 / x) * math.sin(1 / x)
         result /= no_of_samplings
         print "%s: %s" % (j, result)
         result_ls.append(result)
     print "mean: %s, std: %s" % (MLab.mean(result_ls), MLab.std(result_ls))
Example #10
    def setArray(self, arr, palette=palettes["brownish"]):
        self.array = arr

        # if no_scaling: #for subplots and stuff like that...
        #  im = pilutil.toimage(MLab.flipud(arr), pal = palette, cmin = 0., cmax = 1.)
        self.image = pilutil.toimage(MLab.flipud(arr), pal=palette)
        self.afterResize()
Example #11
	def get_top_mas(self, list_of_mas, top_percentage):
		"""
		05-09-05
			start
		06-07-05
			if top_percentage is less than 200, use 200.
		"""
		sys.stderr.write("Getting the top %s std edges..."%top_percentage)
		list_of_stds = []
		for ma in list_of_mas:
			std = MLab.std(ma.compressed())	#disregard the NAs
			list_of_stds.append(std)
		top_number = int(len(list_of_stds)*top_percentage)	#how many we want
		if top_number<200:	#06-07-05	200 is the bottom line.
			top_number = 200
		arg_list  = argsort(list_of_stds)	#sort it, ascending
		arg_list = arg_list.tolist()	#convert from array to list
		arg_list.reverse()	#reverse, descending order
		top_arg_list = arg_list[:top_number]	#get the top_number of arg_list	#06-07-05 if top_number>len(arg_list), it's ok.
		if self.debug:
			print "list_of_stds is %s"%repr(list_of_stds)
			print "top_number is %s"%top_number
			print "arg_list is %s"%repr(arg_list)
			print "top_arg_list is %s"%repr(top_arg_list)
		list_of_top_mas = []
		for index in top_arg_list:
			list_of_top_mas.append(list_of_mas[index])
		sys.stderr.write("Done.\n")
		return list_of_top_mas
Example #12
 def setPalette(self, palette):
     self.palette = palette
     if self.array:
         self.image = pilutil.toimage(MLab.flipud(self.array), pal=palette)
         self.doPlotImage()
     if self.legend:
         self.legend.setImagePlot(self)
Example #13
def calculateStats(results, Fields, Filter):
    """ Calculates S_ (Sum), A_ (Avg/mean), and D_ (std Deviation) for each field"""
    try:
        for field in Fields:
            #print field,field,results[field]
            parts = field.split('_')
            if len(parts) > 1: prefix = parts[0] + '_'
            else: prefix = ''
            if len(results[field]) > 1:
                results['S_' + field] = MLab.sum(results[field])
                results['A_' + field] = MLab.mean(results[field])
                results['D_' + field] = MLab.std(
                    results[field]) / (MLab.sqrt(len(results[field])) - 1)
            else:
                results['S_' + field] = results[field][0]
                results['A_' + field] = results[field][0]
                results['D_' + field] = 0.0
            Filtered = MLab.nonzero(
                results[prefix + 'TgtFound'][:len(results[field]) - 1])
            if len(Filtered) > 1:
                TgtFoundResults = MLab.choose(Filtered, results[field])
                results['N_' + field] = MLab.mean(TgtFoundResults)
                results['E_' + field] = MLab.std(TgtFoundResults) / (
                    MLab.sqrt(len(TgtFoundResults)) - 1)
            else:
                results['N_' + field] = MLab.choose(Filtered,
                                                    results[field])[0]
                results['E_' + field] = 0.0
        results['RtngConfCorr'] = calculateCorrelation(results['NavConf'],
                                                       results['DirRtng'])
        results['RtngCorrCorr'] = calculateCorrelation(results['NavConf'],
                                                       results['TgtFound'])
        results['ConfCorrCorr'] = calculateCorrelation(results['DirRtng'],
                                                       results['TgtFound'])
        results['RtngEffcCorr'] = calculateCorrelation(results['NavConf'],
                                                       results['Efficiency'])
        results['ConfEffcCorr'] = calculateCorrelation(results['DirRtng'],
                                                       results['Efficiency'])
        results['A_CorrTgt'] = results['CorrTgt'] / float(results['IncrTgt'] +
                                                          results['CorrTgt'])
    except IndexError:
        pass  #print 'ERROR: Missing keys when calculating stats for', field, Filter
    except KeyError:
        pass  #print 'ERROR: Missing keys when calculating stats for', field, Filter
    except ValueError:
        print 'ERROR: Invalid entry in choice array', field, Filtered, len(
            results[field]), '\n', results[field]
Example #14
def SplitPercentile(List, numCuts):
    L = MLab.msort(Numeric.array(List))
    Percentiles = []
    count = len(L)
    for i in range(1, numCuts):
        p = i * 100.0 / numCuts
        Percentiles.append(percentile(L, int(p)))
    return Percentiles
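An illustrative call, assuming Numeric/MLab are importable; percentile() is the helper shown in Example #19 below:

print SplitPercentile([1, 2, 3, 4, 5, 6, 7, 8, 9], 3)   # tertile cut points -> [3, 6]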
Example #15
def rosen_der(x):
    xm = x[1:-1]
    xm_m1 = x[:-2]
    xm_p1 = x[2:]
    der = MLab.zeros(x.shape,x.typecode())
    der[1:-1] = 200*(xm-xm_m1**2) - 400*(xm_p1 - xm**2)*xm - 2*(1-xm)
    der[0] = -400*x[0]*(x[1]-x[0]**2) - 2*(1-x[0])
    der[-1] = 200*(x[-1]-x[-2]**2)
    return der
Example #16
    def drawmeridians(self,ax,meridians,color='k',linewidth=1., \
                      linestyle='--',dashes=[1,1]):
        """
 draw meridians (longitude lines).

 ax - current axis instance.
 meridians - list containing longitude values to draw (in degrees).
 color - color to draw meridians (default black).
 linewidth - line width for meridians (default 1.)
 linestyle - line style for meridians (default '--', i.e. dashed).
 dashes - dash pattern for meridians (default [1,1], i.e. 1 pixel on,
  1 pixel off).
        """
        if self.projection not in ['merc','cyl']:
            lats = N.arange(-80,81).astype('f')
        else:
            lats = N.arange(-90,91).astype('f')
        xdelta = 0.1*(self.xmax-self.xmin)
        ydelta = 0.1*(self.ymax-self.ymin)
        for merid in meridians:
            lons = merid*N.ones(len(lats),'f')
            x,y = self(lons,lats)
            # remove points outside domain.
            testx = N.logical_and(x>=self.xmin-xdelta,x<=self.xmax+xdelta)
            x = N.compress(testx, x)
            y = N.compress(testx, y)
            testy = N.logical_and(y>=self.ymin-ydelta,y<=self.ymax+ydelta)
            x = N.compress(testy, x)
            y = N.compress(testy, y)
            if len(x) > 1 and len(y) > 1:
                # split into separate line segments if necessary.
                # (not necessary for mercator or cylindrical).
                xd = (x[1:]-x[0:-1])**2
                yd = (y[1:]-y[0:-1])**2
                dist = N.sqrt(xd+yd)
                split = dist > 500000.
                if N.sum(split) and self.projection not in ['merc','cyl']:
                   ind = (N.compress(split,MLab.squeeze(split*N.indices(xd.shape)))+1).tolist()
                   xl = []
                   yl = []
                   iprev = 0
                   ind.append(len(xd))
                   for i in ind:
                       xl.append(x[iprev:i])
                       yl.append(y[iprev:i])
                       iprev = i
                else:
                    xl = [x]
                    yl = [y]
                # draw each line segment.
                for x,y in zip(xl,yl):
                    # skip if only a point.
                    if len(x) > 1 and len(y) > 1:
                        l = Line2D(x,y,linewidth=linewidth,linestyle=linestyle)
                        l.set_color(color)
                        l.set_dashes(dashes)
                        ax.add_line(l)
Example #17
def phasormovie(z, r_x=0, r_y=0, filename=0):

    if filename:
        print "Saving to file not supported."
        
    import MLab, Tkinter
    
    # Make movie memory.
    
    movie    = []
    frames   = 16
    colormap = create_bipolar_colormap()

    # Scale factors for z.

    zmax = MLab.max(MLab.max(abs(z)))
    z_scale = (len(colormap)-1)/(2*zmax)

    # Calculate each frame.
    
    root = Tkinter.Tk()    
    for Nr in range(0,frames):
        pic = _plot_scaled_matrix(root, colormap, ((z+zmax)*z_scale).real,
                                  r_x, r_y)
        movie.append(pic)
        z *= exp(2j*pi/frames)
        
    # Close window procedure.

    stop = [0]    
    def callback(): stop[0] = 1 
    root.protocol("WM_DELETE_WINDOW", callback)
            
    # Animate picture.

    while not stop[0]:
        for x in range(frames):
            if stop[0]: break
            movie[x].pack()
            root.update()
            root.after(int(100))
            movie[x].forget()

    root.destroy()  
Example #18
def rosen_der(x):
    xm = x[1:-1]
    xm_m1 = x[:-2]
    xm_p1 = x[2:]
    der = MLab.zeros(x.shape, x.typecode())
    der[1:-1] = 200 * (xm - xm_m1**2) - 400 * (xm_p1 - xm**2) * xm - 2 * (1 -
                                                                          xm)
    der[0] = -400 * x[0] * (x[1] - x[0]**2) - 2 * (1 - x[0])
    der[-1] = 200 * (x[-1] - x[-2]**2)
    return der
Example #19
def percentile(m, p):
    """percentile(m) returns the pth percentile of m along the first dimension of m.
    """
    if isinstance(m, list): a = Numeric.array(m)
    else: a = m
    sorted = MLab.msort(a)
    if a.shape[0] % 2 == 1:
        return sorted[int(a.shape[0] * p / 100)]
    else:
        index = a.shape[0] * p / 100
        return (sorted[index - 1] + sorted[index]) / 2
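Two illustrative calls; float input matters in the even-length branch, where Python 2 integer division would otherwise truncate the averaged midpoint:

print percentile([1., 2., 3., 4., 5.], 50)   # odd length: middle value -> 3.0
print percentile([1., 2., 3., 4.], 50)       # even length: (2. + 3.) / 2 -> 2.5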
Example #20
    def statisticAll(self):
      "statisticAll(self) - calculate statistic for all curves"
      out = []
      for id in range(self.nc):
	y = self.y[id]	# curves are 0-indexed here (cf. statisticCalc, where id is 1-based)
	ymin,ymax = min(y),max(y)
	y_hpeak = ymin + .5 *(ymax-ymin)
	x = self.x
	x_hpeak = []
	for i in range(self.NPT):
		if y[i] >= y_hpeak:
			i1 = i
			break
	for i in range(i1+1,self.NPT):
		if y[i] <= y_hpeak:
			i2 = i
			break
		if i == self.NPT-1: i2 = i
	x_hpeak = [x[i1],x[i2]]
	fwhm = abs(x_hpeak[1]-x_hpeak[0])
	for i in range(self.NPT):
		if y[i] == ymax: 
			jmax = i
			break
	xpeak = x[jmax]

	out.append([MLab.mean(y),MLab.std(y),ymin,ymax,xpeak,jmax,y_hpeak,x_hpeak,fwhm])
      fo = open('fwhm.txt','w')
      fo.write('File:  '+self.fname)
      for id in range(self.nc):
		list = out[id]
		fo.write('\n\nCurve #'+str(id+1))
		fo.write('\nMean: '+ str(list[0]))
		fo.write('\nStandard Deviation: '+ str(list[1]))
		fo.write('\nYmin, Ymax: '+ str(list[2]) + ', '+ str(list[3]))
		fo.write('\nYmax @ Xpos[i]: ' + str(list[4]) +'[i='+str(list[5])+']')
		fo.write('\nY-hpeak @ X-hpeak: ' + str(list[6]) +' @ '+str(list[7]))
		fo.write('\nFWHM: ' + str(list[8]))
      fo.close()
      xdisplayfile('fwhm.txt')
Example #21
    def statisticCalc(self):
	"statisticCalc(self) - statistic calculation "
	id = string.atoi(self.stdVar.get())
	if id <1 or id > self.nc: return

	y = self.y[id-1]
	ymin,ymax = min(y),max(y)
	y_hpeak = ymin + .5 *(ymax-ymin)
	x = self.x
	x_hpeak = []
	for i in range(self.NPT):
		if y[i] >= y_hpeak:
			i1 = i
			break
	for i in range(i1+1,self.NPT):
		if y[i] <= y_hpeak:
			i2 = i
			break
		if i == self.NPT-1: i2 = i
	if y[i1] == y_hpeak: x_hpeak_l = x[i1]
	else:
		x_hpeak_l = (y_hpeak-y[i1-1])/(y[i1]-y[i1-1])*(x[i1]-x[i1-1])+x[i1-1]
	if y[i2] == y_hpeak: x_hpeak_r = x[i2]
	else:
		x_hpeak_r = (y_hpeak-y[i2-1])/(y[i2]-y[i2-1])*(x[i2]-x[i2-1])+x[i2-1]
	x_hpeak = [x_hpeak_l,x_hpeak_r]

	self.fwhm = abs(x_hpeak[1]-x_hpeak[0])
	for i in range(self.NPT):
		if y[i] == ymax: 
			jmax = i
			break
	xpeak = x[jmax]
		
	self.stdL[0].set('Curve #'+str(id)+'  Mean: '+ str(MLab.mean(y)))
	self.stdL[1].set('Standard Deviation: '+ str(MLab.std(y)))
	self.stdL[2].set('Ymin, Ymax: '+ str(ymin) + ', '+ str(ymax))
	self.stdL[3].set('Ymax @ Xpos[i]: ' + str(xpeak) +'[i='+str(jmax)+']')
	self.stdL[4].set('Y-hpeak @ X-hpeak: ' + str(y_hpeak) +' @ '+str(x_hpeak))
	self.stdL[5].set('FWHM: ' + str(self.fwhm))
Example #22
def correlateRatingsTags(Ratings, Group='FullDirTrees'):
    import MLab
    DirectionGivers = '(EDA|EMWC|KLS|KXP|TJS|WLH)'
    Routes = '.*'
    Envs = '.*'
    Suffix = 'Dirs_\d.txt$'
    Directions = DirectionCorpusReader('_'.join(
        [DirectionGivers, Envs, Routes, Suffix]))

    CFDist = LearnCondDist(Directions,
                           list(Directions.items(Group)),
                           Start='DIRECTIONS',
                           verbose=1)
    TagOrder = [
        tag.symbol() for tag in CFDist.conditions()
        if not (tag.symbol().endswith('_P') or tag.symbol().endswith('_N')
                or tag.symbol().endswith('_V'))
    ]
    TagOrder.sort()
    results = {}
    for item in Directions.items(Group):
        dirID = item.split('-')[1]
        if not Ratings.has_key(dirID):
            print 'Skipping', dirID
            continue
        TagCounts = {}
        DirModel = LearnCondDist(Directions, [item], Start='DIRECTIONS')
        for nonterm in DirModel.conditions():
            tag = nonterm.symbol()
            #print nonterm, [DirModel[nonterm].count(s) for s in DirModel[nonterm].samples()]
            if tag in TagOrder:
                TagCounts[tag] = MLab.sum([
                    DirModel[nonterm].count(s)
                    for s in DirModel[nonterm].samples()
                ])
        TraitList = [
            Ratings[dirID][2], Ratings[dirID][3], Ratings[dirID][4],
            Ratings[dirID][5], Ratings[dirID][6] + Ratings[dirID][7]
        ]
        for TagName in TagOrder:
            if TagCounts.has_key(TagName):
                TraitList.append(TagCounts[TagName])
            else:
                TraitList.append(0)
        results[dirID] = TraitList
        #print dirID, TraitList
    for k in Ratings.keys():
        if not results.has_key(k):
            results[k] = [0] * (len(TagOrder) + 5)  # Number of ratings used
    return results, TagOrder
Example #23
def cholesky_decomposition(a):
    _assertRank2(a)
    _assertSquareness(a)
    t =_commonType(a)
    a = _castCopyAndTranspose(t, a)
    m = a.shape[0]
    n = a.shape[1]
    if _array_kind[t] == 1:
	lapack_routine = lapack_lite.zpotrf
    else:
	lapack_routine = lapack_lite.dpotrf
    results = lapack_routine('L', n, a, m, 0)
    if results['info'] > 0:
        raise LinAlgError, 'Matrix is not positive definite - Cholesky decomposition cannot be computed'
    return copy.copy(Numeric.transpose(MLab.triu(a,k=0)))
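A quick illustrative check, assuming Numeric and the snippet's LAPACK helpers are importable: the returned lower-triangular factor L reproduces the input as L times its transpose.

a = Numeric.array([[4., 2.], [2., 3.]])
L = cholesky_decomposition(a)
print Numeric.matrixmultiply(L, Numeric.transpose(L))   # ~= [[4. 2.] [2. 3.]]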
Example #24
def plot_matrix(z, r_x=0, r_y=0, filename=0, colorcode=0):

    if filename:
        print "Saving to file not supported."
    
    matlab.put('z', z)
    if r_x:
        matlab.put('x', r_x)
    if r_y:
        matlab.put('y', r_y)

    if not r_x and not r_y:
        matlab("imagesc(z)")
    else:
        matlab("imagesc(x,y,z)")
   
    if (MLab.min(MLab.min(z)) < 0) and (0 < MLab.max(MLab.max(z))):
        create_bipolar_color_map()
    else:
        matlab("colormap jet")
        
    matlab("axis equal")
    matlab("axis off")
    matlab("shading flat")
Example #25
def calculatePerDirection(results, StatTable):
    ##	print 'in calculatePerDirection'
    StatTable.setdefault('/FollowerID', []).append(results['FollowerID'])
    StatTable.setdefault('/Test Name', []).append(results['/Test Name'])
    StatTable.setdefault('DirectionIDs',
                         []).append(results['DirectionIDs'].sort())
    StatTable.setdefault('PerDirection', []).append({})
    PerDirection = StatTable['PerDirection'][len(StatTable['PerDirection']) -
                                             1]
    for dir in results['DirectionIDs']:
        #		print '.',;
        v = results[dir]
        PerDirection[dir] = [v.get(x) for x in PerDirectionFields[:-1]]
        TimesFollowed = v.get(PerDirectionFields[-1])
        if isinstance(TimesFollowed, int):
            if TimesFollowed > 1:
                PerDirection[dir] = [
                    float(x) / TimesFollowed for x in PerDirection[dir]
                ]
            PerDirection[dir].append(TimesFollowed)
        elif isinstance(TimesFollowed, list):
            PerDirection[dir] = [MLab.mean(x) for x in PerDirection[dir]]
            #    PerDirection[dir+'_Std'] = [MLab.std(x) for x in PerDirection[dir]]
            PerDirection[dir].append(MLab.sum(TimesFollowed))
Example #26
	def transform_one_file(self, src_pathname, delimiter, outputdir, b_instance, threshold, type, no_of_valids):
		"""
		08-09-05
			add type
		08-29-05
			add no_of_valids to cut genes with too few valid values
		"""
		reader = csv.reader(file(src_pathname), delimiter=delimiter)
		filename = os.path.basename(src_pathname)
		output_filename = os.path.join(outputdir, filename)
		std_list = []
		for row in reader:
			gene_id = row[0]
			new_row = []
			mask_ls = []
			for i in range(1, len(row)):
				if row[i] == 'NA':
					new_row.append(1e20)
					mask_ls.append(1)
				elif row[i] == '':
					#ignore empty entry
					continue
				else:
					value = float(row[i])
					if type==1:
						if value<=10:
							value = 10
						value = math.log(value)
					new_row.append(value)
					mask_ls.append(0)
			ma_array = array(new_row, mask=mask_ls)
			if self.debug:
				print "The data vector is ",ma_array
				print "Its mask is ", ma_array.mask()
			if len(ma_array.compressed())>=no_of_valids:	#at least two samples, otherwise, correlation can't be calculated
				#08-29-05	no_of_valids controls not too many NA's, which is for graph_modeling
				std = MLab.std(ma_array.compressed())	#disregard the NAs
				if self.debug:
					print "std is ",std
					raw_input("Continue?(Y/n)")
				std_list.append(std)
		del reader
		if len(std_list)>100:
			r.png('%s.png'%output_filename)
			r.hist(std_list, main='histogram',xlab='std',ylab='freq')
			r.dev_off()
Example #27
    def setArray(self, arr, palette=None, axis_x_max=None, axis_y_max=None):
        self.array = arr
        self.palette = palette or palettes["brownish"]

        s = shape(arr)
        axis_y_max = axis_y_max or s[0]
        axis_x_max = axis_x_max or s[1]

        self.setAxisScale(QwtPlot.xBottom, 0, axis_x_max)
        self.setAxisScale(QwtPlot.yLeft, 0, axis_y_max)
        self.replot()

        # if no_scaling: #for subplots and stuff like that...
        #  im = pilutil.toimage(MLab.flipud(arr), pal = palette, cmin = 0., cmax = 1.)
        self.image = pilutil.toimage(MLab.flipud(arr), pal=self.palette)
        if hasattr(self, "image_unzoomed"):
            self.image_unzoomed = self.image
        self.doPlotImage()

        if self.legend:
            self.legend.setImagePlot(self)
Example #28
    def setImagePlot(self, implot, arr=None):
        self.implot = implot
        implot.legend = self

        self.parent_array = arr or implot.array
        self.palette = implot.palette

        r = ravel(self.parent_array)
        r_min, r_max = min(r), max(r)

        dif = r_max - r_min
        if dif == 0:
            r_max = r_min + 1
        self.array = arange(r_min, r_max, dif / 256.)[:, NewAxis]

        self.setAxisScale(QwtPlot.yLeft, r_min, r_max)
        self.replot()

        # if no_scaling: #for subplots and stuff like that...
        #  im = pilutil.toimage(MLab.flipud(arr), pal = palette, cmin = 0., cmax = 1.)
        self.image = pilutil.toimage(MLab.flipud(self.array), pal=self.palette)
        self.doPlotImage()
Example #29
 def zoom(self, x0, y0, x1, y1):
     x0, y0, x1, y1 = map(int, (x0, y0, x1, y1))
     if not self.rescale_on_zoom:
         if not hasattr(self, "image_unzoomed"):
             self.image_unzoomed = copy.copy(self.image)
         unz = self.image_unzoomed
         y_top = unz.size[1]
         flip = lambda y: y_top - y
         if y1 > y0:
             y0, y1 = y1, y0
         self.image = copy.copy(unz)
         self.image = self.image.transform(unz.size, Image.EXTENT,
                                           (x0, flip(y0), x1, flip(y1)))
     else:
         if y1 < y0:
             y0, y1 = y1, y0
         #print x0, x1, y0, y1
         #print shape(self.array)
         arr = self.array[y0:y1, x0:x1]
         self.image = pilutil.toimage(MLab.flipud(arr), pal=self.palette)
         if self.legend:
             self.legend.setImagePlot(self, arr)
     self.doPlotImage()
Example #30
def main():
    #@TODO: make names for all these magic numbers...
    
    screen = makeWindow(winsize=(200, 400))
    grad = makeGradient()
    
    black = pygame.Surface((80,10))
    black.fill((0,0,0))

    # the windowing array quiets down the edges of the sample
    # to prevent "clicking" at the edges:
    windowing = MLab.blackman(64)
    
    session = fakeSession()
    t = 0
    
    center= 81 # same as in creating the graph @TODO: consolidate these

    while keepLooping():

        # simulate acquiring data for 1/4th of a second (64 samples):
        time.sleep(0.25) 

        data = session[t:t+64] * windowing
        graph = makeSpectrogram(data)

        t += 64
        if t >= len(session):
            t = 0

        # draw the gradient, then cover part of it up:
        for i in range(32):
            screen.blit(grad, (20,         20+i*10))
            # left is blank for now:
            #screen.blit(black,(20 +(0       ), 20+i*10))
            # right side shows the data:
            screen.blit(black,(20+center+(graph[i]*10), 20+i*10))
Example #31
def pseudocolor(M, draw=1):
    """Draws a psuedo-color representation of a matrix.
    
    Comments:
        * Use keyword setting draw=0 to just return the plot object without
            first drawing the plot.
            
        * Routine actually inverts the ordering of the matrix so it is
        viewed as we generally think of matrices (i.e. with row indices
        increasing as we move down, and column indices increasing as we
        move to the right)
    """
    nr, nc = M.shape
    plot = contours.ColorPlot()
    ai = plots.auto_axis(Num.ravel(M))
    plot.axes.zaxis(min = ai.min, max = ai.max, 
                    tickstart = ai.start, tickstep = ai.step)
    plot.axes(autoresolution = (nr,nc))
    clrmatrix = contours.ColorMatrix(MLab.flipud(M), nr, nc)
    plot.add(clrmatrix)
    
    if draw:
        plot.draw()
    return plot
Example #32
        a = self.get_mask()
        s = self.get_pixel_size()
        img = Image.open(self.img)
        img = img.resize(s)
        self.canvas.image.paste(img,self.get_pixel_offset(),a)    
  
  
if __name__ == "__main__":
    c = TileEngineCanvas(40,30,400,300)
    
    map = range(256)
    for i in range(0,150):
        map[i] = "red.png"
    for i in range(150,170):
        map[i] = "#f60"
    a = MLab.rand(20,15) * 255
    a = a.astype('b')

    b = MLab.rand(4,3) * 255
    b = b.astype('b')

    map2 = range(256)
    for i in range(128):
        map2[i] = "green.png"
    for i in range(128,200):
        map2[i] = "blue.png"
    for i in range(200,256):
        map2[i] = "red.png"
        
    sprites = []
    import random
Example #33
def fminBFGS(f,
             x0,
             fprime=None,
             args=(),
             avegtol=1e-5,
             maxiter=None,
             fulloutput=0,
             printmessg=1):
    """xopt = fminBFGS(f, x0, fprime=None, args=(), avegtol=1e-5,
                       maxiter=None, fulloutput=0, printmessg=1)

    Optimize the function, f, whose gradient is given by fprime using the
    quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS)
    See Wright, and Nocedal 'Numerical Optimization', 1999, pg. 198.
    """

    app_fprime = 0
    if fprime is None:
        app_fprime = 1

    x0 = Num.asarray(x0)
    if maxiter is None:
        maxiter = len(x0) * 200
    func_calls = 0
    grad_calls = 0
    k = 0
    N = len(x0)
    gtol = N * avegtol
    I = MLab.eye(N)
    Hk = I

    if app_fprime:
        gfk = apply(approx_fprime, (x0, f) + args)
        func_calls = func_calls + len(x0) + 1
    else:
        gfk = apply(fprime, (x0, ) + args)
        grad_calls = grad_calls + 1
    xk = x0
    sk = [2 * gtol]
    while (Num.add.reduce(abs(gfk)) > gtol) and (k < maxiter):
        pk = -Num.dot(Hk, gfk)
        alpha_k, fc, gc = line_search_BFGS(f, xk, pk, gfk, args)
        func_calls = func_calls + fc
        xkp1 = xk + alpha_k * pk
        sk = xkp1 - xk
        xk = xkp1
        if app_fprime:
            gfkp1 = apply(approx_fprime, (xkp1, f) + args)
            func_calls = func_calls + gc + len(x0) + 1
        else:
            gfkp1 = apply(fprime, (xkp1, ) + args)
            grad_calls = grad_calls + gc + 1

        yk = gfkp1 - gfk
        k = k + 1

        rhok = 1 / Num.dot(yk, sk)
        A1 = I - sk[:, Num.NewAxis] * yk[Num.NewAxis, :] * rhok
        A2 = I - yk[:, Num.NewAxis] * sk[Num.NewAxis, :] * rhok
        Hk = Num.dot(A1, Num.dot(
            Hk, A2)) + rhok * sk[:, Num.NewAxis] * sk[Num.NewAxis, :]
        gfk = gfkp1

    if printmessg or fulloutput:
        fval = apply(f, (xk, ) + args)
    if k >= maxiter:
        warnflag = 1
        if printmessg:
            print "Warning: Maximum number of iterations has been exceeded"
            print "         Current function value: %f" % fval
            print "         Iterations: %d" % k
            print "         Function evaluations: %d" % func_calls
            print "         Gradient evaluations: %d" % grad_calls
    else:
        warnflag = 0
        if printmessg:
            print "Optimization terminated successfully."
            print "         Current function value: %f" % fval
            print "         Iterations: %d" % k
            print "         Function evaluations: %d" % func_calls
            print "         Gradient evaluations: %d" % grad_calls

    if fulloutput:
        return xk, fval, func_calls, grad_calls, warnflag
    else:
        return xk
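A minimal illustrative call, assuming Num is the Numeric alias used in this snippet and that rosen/rosen_der (the Rosenbrock function and gradient of Examples #41 and #15) are in scope:

x0 = Num.array([1.3, 0.7, 0.8, 1.9, 1.2])
xopt = fminBFGS(rosen, x0, fprime=rosen_der, printmessg=0)
print xopt    # should approach the minimizer [1, 1, 1, 1, 1]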
Example #34
def analyzeDistanceVsLandmarks(DistanceOccur, LandmarkOccur, TraitInfo,
                               SortName, SortCaption, Legend):
    Dists = []
    for list in DistanceOccur:
        Dists += list
    Landmarks = []
    for list in LandmarkOccur:
        Landmarks += list
    bins = 3
    SplitLines = []
    DistCuts = SplitPercentile(Dists, bins)
    maxLndm = max(Landmarks)
    minLndm = min(Landmarks)
    for d in DistCuts:
        SplitLines.append([d, minLndm, d, maxLndm])
    LandmarkCuts = SplitPercentile(Landmarks, bins)
    maxDist = max(Dists)
    minDist = min(Dists)
    for lm in LandmarkCuts:
        SplitLines.append([minDist, lm, maxDist, lm])
    numDirs = len(TraitInfo.Keys)
    Incr = numDirs / float(len(DistanceOccur))
    Scaled = []
    for low, high in zip(range(0, numDirs + 1 - Incr, Incr),
                         range(Incr, numDirs + 1, Incr)):
        if high + Incr > numDirs: high = numDirs
        if ScaleFn[SortName]:
            Scaled.append([
                ScaleFn[SortName](TraitInfo.Ratings[k])
                for k in TraitInfo.Keys[low:high]
            ])
        else:
            Scaled = [None]
#     print len(DistanceOccur),[len(i) for i in DistanceOccur]
#     print len(LandmarkOccur),[len(i) for i in LandmarkOccur]
#     print len(Scaled), [len(i) for i in Scaled if i], Scaled
    LogGraphs.graphScatter(
        DistanceOccur,
        LandmarkOccur,
        ['Distance and Direction Occurrences', 'Landmark Occurrences'],
        'Distance_Landmark_' + SortName + '_All',
        Legend,
        SortCaption,
        Lines=SplitLines,
    )
    #Colors=Scaled)
    TraitInfo.Keys.sort(lambda x, y: cmp(TraitInfo.LandmarksPerRouteLeg[x],
                                         TraitInfo.LandmarksPerRouteLeg[y]))
    LandmarkSplit = []
    Len = len(TraitInfo.Keys)
    for i in range(1, bins + 1):
        LandmarkSplit += [TraitInfo.Keys[(i - 1) * Len / bins:i * Len / bins]]
    Split = []
    for list in LandmarkSplit:
        for l, h in zip([minDist] + DistCuts, DistCuts + [maxDist]):
            Split.append([
                x for x in list if TraitInfo.DistDirPerRouteLeg[x] >= l
                and TraitInfo.DistDirPerRouteLeg[x] <= h
            ])
    Split.append(TraitInfo.Keys)  #All
    StatStr = '\t'.join([
        'Label', 'Dist', 'Lndm', 'Effc', 'CEff', 'Corr', 'Conf', 'Ratg', 'Numb'
    ] + [x[0:3] for x in LogAnalyzer.Directors]) + '\n'
    Labels = [
        'L-L', 'M-L', 'H-L', 'L-M', 'M-M', 'H-M', 'L-H', 'M-H', 'H-H', 'All'
    ]
    Ratgs = []
    Dists = []
    Lndms = []
    for Label, list in zip(Labels, Split):
        Dist = MLab.mean(
            Numeric.array([TraitInfo.DistDirPerRouteLeg[x] for x in list],
                          Numeric.Float))
        Lndm = MLab.mean(
            Numeric.array([TraitInfo.LandmarksPerRouteLeg[x] for x in list],
                          Numeric.Float))
        Effc = MLab.mean(
            Numeric.array([TraitInfo.Ratings[x][0] for x in list],
                          Numeric.Float))
        Corr = MLab.mean(
            Numeric.array([TraitInfo.Ratings[x][1] for x in list],
                          Numeric.Float))
        Conf = MLab.mean(
            Numeric.array([TraitInfo.Ratings[x][2] for x in list],
                          Numeric.Float))
        Ratg = MLab.mean(
            Numeric.array([TraitInfo.Ratings[x][3] for x in list],
                          Numeric.Float))
        CEff = Effc / Corr
        for stat in [
                Label,
                Dist,
                Lndm,
                Effc,
                CEff,
                Corr,
                Conf,
                Ratg,
                len(list),
        ]:
            StatStr += str(stat)[0:6].ljust(6) + '\t'
        GiverOccur = {}
        for g in LogAnalyzer.Directors:
            GiverOccur[g[0:3]] = 0
        for name in list:
            GiverOccur[name[0:3]] += 1
        for g in LogAnalyzer.Directors:
            StatStr += str(GiverOccur[g[0:3]]) + '\t'
        StatStr += '\n'
        Dists.append(Dist)
        Ratgs.append(Ratg)
        Lndms.append(Lndm)


#         if ScaleFn[SortName]: Scaled= [ScaleFn[SortName](TraitInfo.Ratings[k]) for k in list]
#         else: Scaled = range(len(list))
#         LogGraphs.graphScatter([Scaled],
#                                [[TraitInfo.DistDirPerRouteLeg[x] for x in list]],
#                                ['Relative Rating','DistDir'],
#                                'Traits_Rating_DistDir_'+Label+'_'+SortName+'_All',Legend,SortCaption,LogX=1)
#         LogGraphs.graphScatter([Scaled],
#                                [[TraitInfo.LandmarksPerRouteLeg[x] for x in list]],
#                                ['Relative Rating','Landmarks'],
#                                'Traits_Rating_Landmarks_'+Label+'_'+SortName+'_All',Legend,SortCaption,LogX=1)
    LogGraphs.simpleGraph(
        [Dists, Lndms], [Ratgs, Ratgs],
        'Directions & Distances, Landmarks vs. Relative Rating',
        'Rating_DistLandm_' + Label + '_' + SortName + '_All',
        ['Distances and Directions', 'Objects,Appearances,Paths'],
        SortCaption,
        Labels=['Directions & Distances, Landmarks', 'Relative Rating'])
    print StatStr
    FILE = open(
        os.path.join('Graphs',
                     'DistDirVsLandmarks_' + SortName + '_Results.tsv'), 'w')
    FILE.write(StatStr)
    FILE.close()
Example #35
import Numeric
from Numeric import *
import math
import MLab

self.x_axis = 12
self.y_axis = 12

self.x = MLab.rand(self.number) * self.x_axis
self.y = MLab.rand(self.number) * self.y_axis

for j in range(self.number):
    self.x[j] = int(self.x[j])
    self.y[j] = int(self.y[j])

self.pos_grid = zeros((self.x_axis, self.y_axis, 2))
for i in range(self.x_axis):
    for j in range(self.y_axis):
        for k in range(self.number):
            if self.x[k] == i and self.y[k] == j:
                self.pos_grid[i][j][0] = 0
                self.pos_grid[i][j][1] = self.initial_tokens
Example #36
import Numeric
from Numeric import * 
import MLab

for i in range(self.x_axis):
    for j in range(self.y_axis):
        if self.pos_grid[i][j][1] > 0:
            if self.pos_grid[i][j][0] > self.lifespan or self.pos_grid[i][j][1] <= 1:
                self.pos_grid[i][j] = [0,0]
            else:
                self.pos_grid[i][j][1] -= 1
                self.pos_grid[i][j][0] += 1
                

self.desire_grid = MLab.rand(self.x_axis,self.y_axis) * 0
for x in range(self.x_axis):
    for y in range(self.y_axis):
        value = 0
        for x2 in range(self.x_axis):
            for y2 in range(self.y_axis):
                value += self.pos_grid[x2][y2][1] / (1 + math.sqrt(pow(abs(x-x2),2) + pow(abs(y-y2),2)))
        self.desire_grid[x][y] = value  

self.already_grid = zeros((self.x_axis,self.y_axis))

foodgrid = self.organizer.get_var("verdefood","desire_grid",[0])                
self.food_pos_grid = self.organizer.get_var("verdefood","pos_grid",[0])                
self.other_pos_grid = self.organizer.get_var("verdeeater","pos_grid",[0])


Example #37
def calculatePercentiles(TraitInfo,
                         SortFunc=cmpNull,
                         SortName='Sorted',
                         SortCaption='',
                         Bins=5,
                         Legend=[],
                         Margin=25):
    """TraitInfo.Stats is a hashtable of lists of statistics per direction set."""
    if not TraitInfo.lastStatNum:
        TraitInfo.lastStatNum = len(TraitInfo.StatNames)
    TraitInfo.Keys.sort(
        lambda x, y: SortFunc(TraitInfo.Ratings[x], TraitInfo.Ratings[y]))
    X = []
    Y = []
    Yerrs = []
    i = .5
    Words = []
    Redirs = []
    Forwards = []
    Turns = []
    Rating = []
    LandmarkOccur = []
    Landmarks = ['STRUCT', 'OBJ', 'PATH']
    DistDirOccur = []
    DistDirs = ['DIST', 'DIR']
    TraitInfo.LandmarksPerRouteLeg = {}
    TraitInfo.DistDirPerRouteLeg = {}
    numDirs = len(TraitInfo.Keys)
    Incr = numDirs / float(Bins)
    for low, high in zip(range(0, numDirs + 1 - Incr, Incr),
                         range(Incr, numDirs + 1, Incr)):
        if high + Incr > numDirs: high = numDirs
        Values = Numeric.array(
            [TraitInfo.Stats[x] for x in TraitInfo.Keys[low:high]],
            Numeric.Float)
        X += [Numeric.array([x + i for x in range(TraitInfo.lastStatNum)])]
        Y += [MLab.mean(Values)[0:TraitInfo.lastStatNum]]  # or median
        Yerrs += [MLab.std(Values)]

        Redirs += [[
            random.gauss(x, .02)
            for x in Values[:, TraitInfo.StatNames.index('ReDirects')]
        ]]
        Forwards += [[
            random.gauss(x, .02)
            for x in Values[:, TraitInfo.StatNames.index('FwdMoves')]
        ]]
        Turns += [[
            random.gauss(x, .02)
            for x in Values[:, TraitInfo.StatNames.index('Turns')]
        ]]
        RouteLegs = [
            max(float(rl), 1.0)
            for rl in Values[:, TraitInfo.StatNames.index('ROUTE_LEG')]
        ]
        DistDirOccur += sumOverStats(DistDirs, TraitInfo.DistDirPerRouteLeg,
                                     TraitInfo.Keys[low:high],
                                     TraitInfo.StatNames, Values, RouteLegs)
        LandmarkOccur += sumOverStats(Landmarks,
                                      TraitInfo.LandmarksPerRouteLeg,
                                      TraitInfo.Keys[low:high],
                                      TraitInfo.StatNames, Values, RouteLegs)
        #Words += [[random.gauss(x,.02) for x in Values[:,TraitInfo.StatNames.index('Words')]]]
        #i += 1.0/Bins
    Names = TraitInfo.StatNames[0:TraitInfo.lastStatNum]
    Yerrs = [x / math.sqrt(high - low - 1) for x in Yerrs]
    LogGraphs.graphErrorbar(X, Y, Names, 'Traits_' + SortName + '_All', Yerrs,
                            Legend, SortCaption)
    graphHighStats(TraitInfo, SortName, SortCaption, Bins, Legend, X, Y, Yerrs)
    LogGraphs.graphScatter(Forwards, Redirs, ['Forwards', 'ReDirects'],
                           'FwdsRedirs_' + SortName + '_All', Legend,
                           SortCaption)
    LogGraphs.graphScatter(Turns, Redirs, ['Turns', 'ReDirects'],
                           'TurnsRedirs_' + SortName + '_All', Legend,
                           SortCaption)
    analyzeDistanceVsLandmarks(DistDirOccur, LandmarkOccur, TraitInfo,
                               SortName, SortCaption, Legend)
Example #38
            f = open('observables_tau%g.dat' % d.tau, 'w')
            pickle.dump(observables, f)
            f.close()

            print 'tau',d.tau,'block',d.blocks-tau[2],\
                  'no. of walkers',d.no_of_walkers
            print "energy = %g +/- %g"%(d.energy,\
                                        math.sqrt(d.energy2/\
                                                  float(d.steps)))
            print "sigma = %g" % d.energy2

        tau[2] -= 1

    if d.master:
        print 'tau', d.tau, 'no. of walkers', d.no_of_walkers
        print "energy = %g +/- %g"%(MLab.mean(observables[0]),\
                                    math.sqrt(MLab.std(observables[0])/\
                                              float(len(observables[0]))))
        print "sigma = %g" % MLab.std(observables[0])
        os.remove('observables_tau%g.dat' % d.tau)
        f = open('results_' + d.outfile, 'a')
        f.write('tau %d no. of walkers %d' % (d.tau, d.no_of_walkers))
        f.write("energy = %g +/- %g"%(MLab.mean(observables[0]),\
                                    math.sqrt(MLab.std(observables[0])/\
                                              float(len(observables[0])))))
        f.write("sigma = %g\n" % MLab.std(observables[0]))
        f.close()

    del d.tau_list[0]

if d.master: os.remove('checkpoint.dat')
Example #39
            pickle.dump(observables,f)
            f.close()
            
            print 'tau',d.tau,'block',d.blocks-tau[2],\
                  'no. of walkers',d.no_of_walkers
            print "energy = %g +/- %g"%(d.energy,\
                                        math.sqrt(d.energy2/\
                                                  float(d.steps)))
            print "sigma = %g"%d.energy2

        tau[2] -= 1

    
    if d.master:
        print 'tau',d.tau,'no. of walkers',d.no_of_walkers
        print "energy = %g +/- %g"%(MLab.mean(observables[0]),\
                                    math.sqrt(MLab.std(observables[0])/\
                                              float(len(observables[0]))))
        print "sigma = %g"%MLab.std(observables[0])
        os.remove('observables_tau%g.dat'%d.tau)
        f = open('results_'+d.outfile,'a')
        f.write('tau %d no. of walkers %d'%(d.tau,d.no_of_walkers))
        f.write("energy = %g +/- %g"%(MLab.mean(observables[0]),\
                                    math.sqrt(MLab.std(observables[0])/\
                                              float(len(observables[0])))))
        f.write("sigma = %g\n"%MLab.std(observables[0]))
        f.close()

    del d.tau_list[0]
    
if d.master: os.remove('checkpoint.dat')
Example #40
import Numeric
from Numeric import * 
import math
import MLab

self.x_axis = 12
self.y_axis = 12

self.x = MLab.rand(self.number) * self.x_axis
self.y = MLab.rand(self.number) * self.y_axis

for j in range(self.number):
    self.x[j] = int(self.x[j])
    self.y[j] = int(self.y[j])

self.pos_grid = zeros((self.x_axis,self.y_axis,2))
for i in range(self.x_axis):
    for j in range(self.y_axis):
        for k in range(self.number):
            if self.x[k] == i and self.y[k] == j:
                self.pos_grid[i][j][0] = 0
                self.pos_grid[i][j][1] = self.initial_tokens

Example #41
def rosen(x):  # The Rosenbrock function
    return MLab.sum(100.0 * (x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0)
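Every term vanishes at x = (1, ..., 1), the global minimum. Illustrative checks, assuming Numeric and MLab are importable:

print rosen(Numeric.array([1.0, 1.0, 1.0]))   # -> 0.0
print rosen(Numeric.array([0.0, 0.0, 0.0]))   # -> 1 + 1 = 2.0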
Example #42
        mxLat = N.ones((NumTimesteps),N.Float)
        meanLat = N.ones((NumTimesteps),N.Float)
        mnLat = N.ones((NumTimesteps),N.Float)
        leTimes = []
        
        # Assuming a Timestep is an hour (3600 seconds)!
        count = 0        
        for cn in range(NumTimesteps):
            dtime = datetime.timedelta(seconds=cn*3600)
            time  = modelStartTime + dtime
            tLat = N.take(Latitude,(cn,), 1)
    #         print tLat
            mxL = MA.maximum(tLat,)
            mnL = MA.minimum(tLat,)            
            meanL=MLab.mean(tLat,0)    
            N.put(mxLat,cn,mxL)            
            N.put(mnLat,cn,mnL)
            N.put(meanLat,cn,meanL)
            leTimes.append(time)
            
            nboth = 0
            if time in ocTime:
                write_matches = 1
                while write_matches == 1:
#                     print nboth, cn                     
                    if count == 0:
                        nboth = ocTime.index(time)
                        count +=1                        
#                         fs.write('%i, %s, %i %i %i %i %i, %f %f %f, %f %f %f\n'%(cn, file, int(time.year), int(time.month), int(time.day), int(time.hour), int(time.minute), mxL, meanL, mnL, ocLat[nboth,0], ocLat[nboth,1],ocLat[nboth,2]))
                    else:
Example #43
         target = [x,y-1]
 if y < self.y_axis - 1:
     n = foodgrid[x][y+1]
     if n > max:
         max = n
         target = [x,y+1]
 if max == 0:
     target = [x,y]
 if self.food_pos_grid[target[0]][target[1]][1] == 0 and self.food2_pos_grid[target[0]][target[1]][1] == 0 :
     if self.pos_grid[target[0]][target[1]][1] == 0:            
         self.pos_grid[target[0]][target[1]] = copy.deepcopy(self.pos_grid[x][y])
         self.pos_grid[x][y] = [0,0]
         self.already_grid[x][y] = 1
         self.already_grid[target[0]][target[1]] = 1
     elif (target[0] != x or target[1] != y) and (target[0] != 0 or target[1] != 0) and self.pos_grid[x][y][0] > self.rep_age and self.pos_grid[target[0]][target[1]][0] > self.rep_age:
         z1 = MLab.rand(1) * self.x_axis
         z2 = MLab.rand(1) * self.y_axis
         z1 = int(z1[0])
         z2 = int(z2[0])
         if self.food_pos_grid[z1][z2][1] == 0 and self.food2_pos_grid[z1][z2][1]==0 and self.pos_grid[z1][z2][1]==0:
             self.pos_grid[z1][z2][0] = 0
             self.pos_grid[z1][z2][1] = (self.pos_grid[x][y][1] + self.pos_grid[target[0]][target[1]][1])/2
         
 elif self.food2_pos_grid[target[0]][target[1]][1] == 0:
         self.pos_grid[x][y][1] += (self.food_pos_grid[target[0]][target[1]][1]/10 + 1)
         self.pos_grid[target[0]][target[1]] = copy.deepcopy(self.pos_grid[x][y])
         self.pos_grid[x][y] = [0,0]
         self.food_pos_grid[target[0]][target[1]] = [0,0]
        
 else:
     temp = "temp"
Example #44
def main():
    EMAN.appinit(sys.argv)
    if sys.argv[-1].startswith("usefs="):
        sys.argv = sys.argv[:-1]  # remove the runpar fileserver info

    (options, rawimage, refmap) = parse_command_line()

    sffile = options.sffile
    verbose = options.verbose
    shrink = options.shrink
    mask = options.mask
    first = options.first
    last = options.last
    scorefunc = options.scorefunc

    projfile = options.projection
    output_ptcls = options.update_rawimage
    cmplstfile = options.cmplstfile
    ortlstfile = options.ortlstfile
    startSym = options.startSym
    endSym = options.endSym

    if not options.nocmdlog:
        pid = EMAN.LOGbegin(sys.argv)
        EMAN.LOGInfile(pid, rawimage)
        EMAN.LOGInfile(pid, refmap)
        if projfile:
            EMAN.LOGOutfile(pid, projfile)
        if output_ptcls:
            EMAN.LOGOutfile(pid, output_ptcls)
        if cmplstfile:
            EMAN.LOGOutfile(pid, cmplstfile)
        if ortlstfile:
            EMAN.LOGOutfile(pid, ortlstfile)

    ptcls = []
    if not (mpi or pypar) or ((mpi and mpi.rank == 0) or (pypar and pypar.rank == 0)):
        ptcls = EMAN.image2list(rawimage)
        ptcls = ptcls[first:last]

        print "Read %d particle parameters" % (len(ptcls))
        # ptcls = ptcls[0:10]

    if mpi and mpi.size > 1:
        ptcls = mpi.bcast(ptcls)
        print "rank=%d\t%d particles" % (mpi.rank, len(ptcls))
    elif pypar and pypar.size() > 1:
        ptcls = pypar.broadcast(ptcls)
        print "rank=%d\t%d particles" % (pypar.rank(), len(ptcls))

    if sffile:
        sf = EMAN.XYData()
        sf.readFile(sffile)
        sf.logy()

    if not mpi or ((mpi and mpi.rank == 0) or (pypar and pypar.rank() == 0)):
        if cmplstfile and projfile:
            if output_ptcls:
                raw_tmp = output_ptcls
            else:
                raw_tmp = rawimage
            raw_tmp = rawimage  # unconditionally overrides the if/else choice above
            fp = open("tmp-" + cmplstfile, "w")
            fp.write("#LST\n")
            for i in range(len(ptcls)):
                fp.write("%d\t%s\n" % (first + i, projfile))
                fp.write("%d\t%s\n" % (first + i, raw_tmp))
            fp.close()
        if (mpi and mpi.size > 1 and mpi.rank == 0) or (pypar and pypar.size() > 1 and pypar.rank() == 0):
            total_recv = 0
            if output_ptcls:
                total_recv += len(ptcls)
            if projfile:
                total_recv += len(ptcls)
            for r in range(total_recv):
                # print "before recv from %d" % (r)
                if mpi:
                    msg, status = mpi.recv()
                else:
                    msg = pypar.receive(r)
                    # print "after recv from %d" % (r)
                    # print msg, status
                d = emdata_load(msg[0])
                fname = msg[1]
                index = msg[2]
                d.writeImage(fname, index)
                print "wrtie %s %d" % (fname, index)
            if options.ortlstfile:
                solutions = []
                for r in range(1, mpi.size):
                    msg, status = mpi.recv(source=r, tag=r)
                    solutions += msg

                def ptcl_cmp(x, y):
                    eq = cmp(x[0], y[0])
                    if not eq:
                        return cmp(x[1], y[1])
                    else:
                        return eq

                solutions.sort(ptcl_cmp)
    if (not mpi or (mpi and ((mpi.size > 1 and mpi.rank > 0) or mpi.size == 1))) or (
        not pypar or (pypar and ((pypar.size() > 1 and pypar.rank() > 0) or pypar.size() == 1))
    ):
        map3d = EMAN.EMData()
        map3d.readImage(refmap, -1)
        map3d.normalize()
        if shrink > 1:
            map3d.meanShrink(shrink)
        map3d.realFilter(0, 0)  # threshold, remove negative pixels

        imgsize = map3d.ySize()

        img = EMAN.EMData()

        ctffilter = EMAN.EMData()
        ctffilter.setSize(imgsize + 2, imgsize, 1)
        ctffilter.setComplex(1)
        ctffilter.setRI(1)

        if (mpi and mpi.size > 1) or (pypar and pypar.size() > 1):
            ptclset = range(mpi.rank - 1, len(ptcls), mpi.size - 1)
        else:
            ptclset = range(0, len(ptcls))

        if mpi:
            print "Process %d/%d: %d/%d particles" % (mpi.rank, mpi.size, len(ptclset), len(ptcls))

        solutions = []
        for i in ptclset:
            ptcl = ptcls[i]
            e = EMAN.Euler(ptcl[2], ptcl[3], ptcl[4])
            dx = ptcl[5] - imgsize / 2
            dy = ptcl[6] - imgsize / 2
            print "%d\talt,az,phi=%8g,%8g,%8g\tx,y=%8g,%8g" % (
                i + first,
                e.alt() * 180 / pi,
                e.az() * 180 / pi,
                e.phi() * 180 / pi,
                dx,
                dy,
            ),

            img.readImage(ptcl[0], ptcl[1])
            img.setTAlign(-dx, -dy, 0)
            img.setRAlign(0, 0, 0)
            img.rotateAndTranslate()  # now img is centered
            img.applyMask(int(mask - max(abs(dx), abs(dy))), 6, 0, 0, 0)
            if img.hasCTF():
                fft = img.doFFT()

                ctfparm = img.getCTF()
                ctffilter.setCTF(ctfparm)
                if options.phasecorrected:
                    if sffile:
                        ctffilter.ctfMap(64, sf)  # Wiener filter with 1/CTF (no sign) correction
                else:
                    if sffile:
                        ctffilter.ctfMap(32, sf)  # Wiener filter with 1/CTF (including sign) correction
                    else:
                        ctffilter.ctfMap(2, EMAN.XYData())  # flip phase

                fft.mult(ctffilter)
                img2 = fft.doIFT()  # now img2 is the CTF-corrected raw image

                img.gimmeFFT()
                del fft
            else:
                img2 = img

            img2.normalize()
            if shrink > 1:
                img2.meanShrink(shrink)
            # if sffile:
            # 	snrcurve = img2.ctfCurve(9, sf)	# absolute SNR
            # else:
            # 	snrcurve = img2.ctfCurve(3, EMAN.XYData())		# relative SNR

            e.setSym(startSym)
            maxscore = -1e30  # the larger the better
            scores = []
            for s in range(e.getMaxSymEl()):
                ef = e.SymN(s)
                # proj = map3d.project3d(ef.alt(), ef.az(), ef.phi(), -6)		# Wen's direct 2D accumulation projection
                proj = map3d.project3d(
                    ef.alt(), ef.az(), ef.phi(), -1
                )  # Pawel's fast projection, ~3 times faster than mode -6 with 216^3
                # don't use mode -4, it modifies its own data
                # proj2 = proj
                proj2 = proj.matchFilter(img2)
                proj2.applyMask(int(mask - max(abs(dx), abs(dy))), 6, 0, 0, 0)
                if scorefunc == "ncccmp":
                    score = proj2.ncccmp(img2)
                elif scorefunc == "lcmp":
                    score = -proj2.lcmp(img2)[0]
                elif scorefunc == "pcmp":
                    score = -proj2.pcmp(img2)
                elif scorefunc == "fsccmp":
                    score = proj2.fscmp(img2, [])
                elif scorefunc == "wfsccmp":
                    score = proj2.fscmp(img2, snrcurve)
                if score > maxscore:
                    maxscore = score
                    best_proj = proj2
                    best_ef = ef
                    best_s = s
                scores.append(score)
                # proj2.writeImage("proj-debug.img",s)
                # print "\tsym %2d/%2d: euler=%8g,%8g,%8g\tscore=%12.7g\tbest=%2d euler=%8g,%8g,%8g score=%12.7g\n" % \
                # 		   (s,60,ef.alt()*180/pi,ef.az()*180/pi,ef.phi()*180/pi,score,best_s,best_ef.alt()*180/pi,best_ef.az()*180/pi,best_ef.phi()*180/pi,maxscore)
            scores = Numeric.array(scores)
            print "\tbest=%2d euler=%8g,%8g,%8g max score=%12.7g\tmean=%12.7g\tmedian=%12.7g\tmin=%12.7g\n" % (
                best_s,
                best_ef.alt() * 180 / pi,
                best_ef.az() * 180 / pi,
                best_ef.phi() * 180 / pi,
                maxscore,
                MLab.mean(scores),
                MLab.median(scores),
                MLab.min(scores),
            )
            if projfile:
                best_proj.setTAlign(dx, dy, 0)
                best_proj.setRAlign(0, 0, 0)
                best_proj.rotateAndTranslate()

                best_proj.set_center_x(ptcl[5])
                best_proj.set_center_y(ptcl[6])
                best_proj.setRAlign(best_ef)
                # print "before proj send from %d" % (mpi.rank)

                if mpi and mpi.size > 1:
                    mpi.send((emdata_dump(best_proj), projfile, i + first), 0)
                elif pypar and pypar.size() > 1:
                    pypar.send((emdata_dump(best_proj), projfile, i + first), 0)
                # print "after proj send from %d" % (mpi.rank)
                else:
                    best_proj.writeImage(projfile, i + first)

            img2.setTAlign(0, 0, 0)
            img2.setRAlign(best_ef)
            img2.setNImg(1)
            # print "before raw send from %d" % (mpi.rank)
            if output_ptcls:
                if mpi and mpi.size > 1:
                    mpi.send((emdata_dump(img2), output_ptcls, i + first), 0)
                elif pypar and pypar.size() > 1:
                    pypar.send((emdata_dump(img2), output_ptcls, i + first), 0)
                # print "after raw send from %d" % (mpi.rank)
                else:
                    img2.writeImage(output_ptcls, i + first)

            solutions.append((ptcl[0], ptcl[1], best_ef.alt(), best_ef.az(), best_ef.phi(), ptcl[5], ptcl[6]))
        if mpi and (mpi.size > 1 and mpi.rank > 0):
            mpi.send(solutions, 0, tag=mpi.rank)
        elif pypar and (pypar.size() > 1 and pypar.rank() > 0):
            pypar.send(solutions, 0)

    if mpi:
        mpi.barrier()
    elif pypar:
        pypar.barrier()
    if mpi:
        mpi.finalize()
    elif pypar:
        pypar.finalize()

    if options.cmplstfile:
        os.rename("tmp-" + cmplstfile, cmplstfile)
    if options.ortlstfile:
        lFile = open(options.ortlstfile, "w")
        lFile.write("#LST\n")
        for i in solutions:
            lFile.write(
                "%d\t%s\t%g\t%g\t%g\t%g\t%g\n"
                % (i[1], i[0], i[2] * 180.0 / pi, i[3] * 180.0 / pi, i[4] * 180.0 / pi, i[5], i[6])
            )
        lFile.close()

    if not options.nocmdlog:
        EMAN.LOGend()
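
The round-robin work distribution above (rank 0 writes, ranks 1..size-1 compute every (size-1)th particle) can be checked without any MPI library. A minimal sketch; the worker_partition helper is hypothetical, not part of the original script:

def worker_partition(nptcls, rank, size):
    # mirrors ptclset above: rank 0 only collects and writes, so worker
    # ranks 1..size-1 split the particle indices round-robin
    if size == 1:
        return range(nptcls)  # serial run: the single process takes everything
    return range(rank - 1, nptcls, size - 1)

# with 10 particles and 4 processes, the three workers cover every index once
for rank in range(1, 4):
    print rank, worker_partition(10, rank, 4)
# 1 [0, 3, 6, 9]
# 2 [1, 4, 7]
# 3 [2, 5, 8]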
Exemplo n.º 45
0
# This module is a lite version of LinAlg.py module which contains
# high-level Python interface to the LAPACK library.
Exemplo n.º 46
0

	### Check 2nd moments to see if globular
	axisMatrix=[]
	index1=0
	neighborhood=[]
	pointcloud=[]
	betadistance=8.25
	while index1 < atomCount:
		NumPoints1=Numeric.array(pixels[index1],'d')
		NumPoints1_mean=Numeric.sum(NumPoints1)/len(NumPoints1)
		NumPoints2=NumPoints1-NumPoints1_mean

		points1=Numeric.sum(map(Numeric.innerproduct,NumPoints2,NumPoints2))
		h = Numeric.sum(map(Numeric.outerproduct,NumPoints2,NumPoints2))	# 3x3 second-moment (scatter) matrix
		[u1,x1,v1]=MLab.svd(h)
		if Numeric.alltrue(Numeric.equal(x1,0)):	# degenerate cloud: all singular values zero
			print index1,
			print "is bad"
			xmod=x1
		else:
			xmod=x1/max(x1)
		if xmod[2]==0:
			aspectratio=0
		else:
			aspectratio=xmod[1]/xmod[2]
		axisMatrix.append(aspectratio)
		index1=index1+1	# advance to the next atom


	### checks for nearest atoms and nearest helical atoms
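
The loop above classifies a point cloud by the singular values of its second-moment (scatter) matrix: a globular cloud has three comparable moments, an elongated one does not. A self-contained sketch of the same test, assuming the legacy Numeric and MLab modules used throughout these examples (aspect_ratio is an illustrative name, not from the original):

import Numeric
import MLab

def aspect_ratio(points):
    # points: a list of [x, y, z] coordinates
    pts = Numeric.array(points, 'd')
    centered = pts - Numeric.sum(pts) / len(pts)  # subtract the centroid
    # 3x3 scatter matrix: sum of outer products of the centered coordinates
    h = Numeric.sum(map(Numeric.outerproduct, centered, centered))
    u, s, v = MLab.svd(h)  # singular values come back in descending order
    if max(s) == 0:
        return 0  # degenerate cloud (all moments zero), flagged "bad" above
    s = s / max(s)
    if s[2] == 0:
        return 0  # planar or collinear cloud
    return s[1] / s[2]  # near 1 for globular clouds, large for elongated ones

print aspect_ratio([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 1, 1]])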
Exemplo n.º 47
0
def rosen(x):  # The Rosenbrock function
    return MLab.sum(100.0*(x[1:]-x[:-1]**2.0)**2.0 + (1-x[:-1])**2.0)
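
A quick sanity check, assuming Numeric and MLab are importable as in the other examples: the Rosenbrock function is non-negative and reaches its global minimum of 0 at the all-ones vector.

import Numeric

print rosen(Numeric.array([1.0, 1.0, 1.0]))  # 0.0, the global minimum
print rosen(Numeric.array([0.0, 0.0]))       # 1.0: 100*(0-0)**2 + (1-0)**2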
Exemplo n.º 48
0
import MLab

# start them at random positions
self.x = MLab.rand(self.number) * 640
self.y = MLab.rand(self.number) * 480

# draw them on the canvas
self.dots = []
for i in range(self.number):
    self.dots.append(self.canvas.create_rectangle(self.x[i] - 1,self.y[i] - 1,self.x[i] + 1,self.y[i] + 1,
                                                  fill="#006600",outline="#006600",width=0))

Exemplo n.º 49
0
import Numeric as Num
import MLab
# approx_fprime and line_search_BFGS are companion helpers defined elsewhere in the same module

def fminBFGS(f, x0, fprime=None, args=(), avegtol=1e-5, maxiter=None, fulloutput=0, printmessg=1):
    """xopt = fminBFGS(f, x0, fprime=None, args=(), avegtol=1e-5,
                       maxiter=None, fulloutput=0, printmessg=1)

    Optimize the function, f, whose gradient is given by fprime using the
    quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS)
    See Wright, and Nocedal 'Numerical Optimization', 1999, pg. 198.
    """

    app_fprime = 0
    if fprime is None:
        app_fprime = 1

    x0 = Num.asarray(x0)
    if maxiter is None:
        maxiter = len(x0)*200
    func_calls = 0
    grad_calls = 0
    k = 0
    N = len(x0)
    gtol = N*avegtol
    I = MLab.eye(N)
    Hk = I

    if app_fprime:
        gfk = apply(approx_fprime,(x0,f)+args)
        func_calls = func_calls + len(x0) + 1
    else:
        gfk = apply(fprime,(x0,)+args)
        grad_calls = grad_calls + 1
    xk = x0
    sk = [2*gtol]
    while (Num.add.reduce(abs(gfk)) > gtol) and (k < maxiter):
        pk = -Num.dot(Hk,gfk)
        alpha_k, fc, gc = line_search_BFGS(f,xk,pk,gfk,args)
        func_calls = func_calls + fc
        xkp1 = xk + alpha_k * pk
        sk = xkp1 - xk
        xk = xkp1
        if app_fprime:
            gfkp1 = apply(approx_fprime,(xkp1,f)+args)
            func_calls = func_calls + gc + len(x0) + 1
        else:
            gfkp1 = apply(fprime,(xkp1,)+args)
            grad_calls = grad_calls + gc + 1

        yk = gfkp1 - gfk
        k = k + 1

        rhok = 1 / Num.dot(yk,sk)
        A1 = I - sk[:,Num.NewAxis] * yk[Num.NewAxis,:] * rhok
        A2 = I - yk[:,Num.NewAxis] * sk[Num.NewAxis,:] * rhok
        Hk = Num.dot(A1,Num.dot(Hk,A2)) + rhok * sk[:,Num.NewAxis] * sk[Num.NewAxis,:]
        gfk = gfkp1


    if printmessg or fulloutput:
        fval = apply(f,(xk,)+args)
    if k >= maxiter:
        warnflag = 1
        if printmessg:
            print "Warning: Maximum number of iterations has been exceeded"
            print "         Current function value: %f" % fval
            print "         Iterations: %d" % k
            print "         Function evaluations: %d" % func_calls
            print "         Gradient evaluations: %d" % grad_calls
    else:
        warnflag = 0
        if printmessg:
            print "Optimization terminated successfully."
            print "         Current function value: %f" % fval
            print "         Iterations: %d" % k
            print "         Function evaluations: %d" % func_calls
            print "         Gradient evaluations: %d" % grad_calls

    if fulloutput:
        return xk, fval, func_calls, grad_calls, warnflag
    else:        
        return xk
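
A usage sketch, assuming the approx_fprime and line_search_BFGS helpers this routine calls are defined in the same module, and borrowing the rosen test function from the earlier example; starting near the solution keeps the line search well behaved:

x0 = [0.8, 1.2, 0.7]
xopt, fval, fcalls, gcalls, flag = fminBFGS(rosen, x0, fulloutput=1, printmessg=0)
print "xopt =", xopt, "f =", fval  # should approach [1, 1, 1] with f near 0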
Exemplo n.º 50
0
    def initcoeffs(self):
	"initcoeffs(self) - initialize fit coeffs according fit function id #"
	list = functions.keys()
	x = self.x
	y = self.y
	(fwhm,xpeak,ypeak) = calcfwhm(x,y)
	npt = len(x)
	id = self.id
	num = functions[list[id]]['NumPar']	
	ymin,ymax = min(y),max(y)
	self.max = ymax
	self.min = ymin
	self.parms[1].delete(0,200)
	import MLab 
	if id == 0:  # Sine
		x0 = 0.5*(x[npt-1]-x[0])
		w = x[npt-1] - x[0]
		a = ymax
		p =  str(x0)+ ', '+ str(a) + ', '+str(w)
	if id == 1: # Rational0
		a = x[0]
		b = y[0]
		c = x[npt-1]
		p =  str(a)+ ', '+ str(b) + ', '+str(c)
	if id ==2: # Linear
		a = y[0]
		b = (y[npt-1]-a)/(x[npt-1]-x[0])
		p =  str(a)+ ', '+ str(b) 
	if id == 3: # Allometric
		a = 1. 
		x0 = x[0]
		c = ymin
		p =  str(a)+ ', '+str(x0) + ', ' + str(c)
	if id == 4: # Quadratic
		x0 = x[npt/2]
		a = (ymax-ymin)/(x[npt-1]-x[0])
		b = -x[npt/2]
		c = ymin
		p =  str(x0) + ', ' + str(a)+ ', '+ str(b) + ', '+str(c) 
	if id == 5: # Gauss
		y0 =  y[0]
		x0 = x[npt/2]
		a =  (ymax-ymin)
		w = MLab.std(y)
		w = -(x[npt-1]-x[0])/10	# overrides the std-based width with a range-based guess
		p =  str(a)+ ', '+ str(x0) + ', '+str(w) +', '+str(y0)
	if id == 6: # Boltzman
		x0 = 0.5*(x[npt-1]+x[0])
		a1 = 2.*y[npt/2]-y[npt-1]+y[0]
		a2 = 2.*y[npt/2]+y[npt-1]-y[0]
		dx = x[npt-1]-x[0]
		p =  str(x0)+ ', '+ str(a1) + ', '+str(a2) +', '+str(dx)
	if id == 7: # ExpGrow
		x0 = x[0]
		y0 = y[0]
		a = (y[npt-1]-y[0])
		t = (x[npt-1]-x[0])
		p =  str(x0)+ ', '+ str(y0) + ', '+str(a) +', '+str(t)
	if id == 8: # Lorentz
		a = ypeak
		x0 = xpeak
		w = fwhm/2
		p =  str(a)+ ', '+str(x0) +', '+str(w)
		print p
	if id == 9: # Expassoc
		y0 = y[0]
		a1 = y[npt/2]*.5
		t1 = x[npt-1]-x[0]
		a2 = y[npt/2]*.5
		t2 = x[npt-1]-x[0]
		p =  str(y0)+ ', '+ str(a1) + ', '+str(t1) +', '+str(a2) +', '+str(t2)
	if id == 10: # Logistic
		x0 = x[npt-1]-x[0]
		p = .5
		a2 = (x[npt-1]/x0)**p*(y[npt-1]/y[npt/2]-(x[npt/2]/x[npt-1])**p)/(1.-y[npt-1]/y[npt/2]) - 1
		a1 = a2+y[npt/2]*(1.+a2+(x[npt/2]/x0)**p)
		p =  str(x0)+ ', '+ str(a1) + ', '+str(a2) +', '+str(p)
	if id == 11: # GaussAmp
		a = ymax-ymin
		w = -(x[npt-1]-x[0])/6
		x0=x[npt/2]
		y0 = y[0]
		p =  str(a)+ ', '+ str(x0) + ', '+str(w) +', '+str(y0)
#		a0 = ymax*.8
#		a1 = .5*(x[npt-1]-x[0])
#		a2 = a1/4.
#		a3 = 1.1
#		a4 = a1*a1 
#		a5 = a1
#		p =  str(a0)+ ', '+ str(a1) + ', '+str(a2) +', '+str(a3) + ', '+str(a4) +', '+str(a5)
	if id == 12: # Pulse
		x0 = x[npt/2]
		y0 = ymin
		a = (ymax-ymin)/2
		t1 = (x[npt-1]-x[0])/2.
		t2 = t1
		p =  str(x0)+ ', '+ str(y0) + ', '+str(a) +', '+str(t1) +', '+str(t2)
	if id == 13: # Hyperbl
		p2 = (x[0]*(y[0]-1.) - x[npt/2]*(y[npt/2]-1.))/(y[npt/2]-y[0])
		p1 = p2*y[npt/2] + x[0]*(y[0]-1.)
		p =  str(p1)+ ', '+ str(p2) 
	if id == 14: # ExpDecay
		x0 = x[0]
		y0 = ymax
		a = ymax-ymin
		t = (x[npt-1]-x[0])
		p =  str(x0)+ ', '+ str(y0) + ', '+str(a) + ', '+str(t)

	print 'coeffs=',p 
	self.parms[1].insert(0,p)
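
As a worked example of the id == 5 (Gauss) branch above: amplitude from the data range, center at the middle sample, width a tenth of the x span (with the sign convention used above), offset from the first point. Synthetic numbers, no widgets required:

x = [0.0, 1.0, 2.0, 3.0, 4.0]
y = [0.1, 0.5, 2.0, 0.5, 0.1]
npt = len(x)
ymin, ymax = min(y), max(y)

a = ymax - ymin               # amplitude above the baseline: 1.9
x0 = x[npt/2]                 # center guess, middle sample: 2.0
w = -(x[npt-1] - x[0]) / 10   # width guess from the x span: -0.4
y0 = y[0]                     # constant offset: 0.1
p = str(a) + ', ' + str(x0) + ', ' + str(w) + ', ' + str(y0)
print 'coeffs=', p            # coeffs= 1.9, 2.0, -0.4, 0.1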