def __init__(self, mux=0, sigmax=1, muy=0, sigmay=1, n=100):
    self.mux = mux
    self.sigmax = sigmax
    self.muy = muy
    self.sigmay = sigmay
    self.n = n
    # Midpoints of n equal-probability intervals on (0, 1)
    p = [(t + .5) / self.n for t in range(self.n)]
    normx = norm(self.mux, self.sigmax)
    normy = norm(self.muy, self.sigmay)
    # Equiprobable quantiles of each marginal distribution
    EquiProbx = array(normx.isf(p))
    EquiProby = array(normy.isf(p))
    # Outer product gives n*n equally likely products x*y
    EquiProb = EquiProbx.reshape(n, 1) * EquiProby
    self.EquiProb = EquiProb.ravel()  # flatten to a length n*n vector

def marquardt(x0, m, eps, f):
    """Levenberg-Marquardt minimisation of f starting from x0.

    m   -- maximum number of accepted steps
    eps -- tolerance on the gradient norm (also the finite-difference step)
    """
    k = 0
    lam = 100000.0  # initial damping factor
    x = x0.copy()
    new_grad = True
    while k < m:
        if new_grad:
            grad = gradient(f, x, eps)
            new_grad = False
        if norm(grad) < eps:
            break
        # Damped Hessian: H + lam * I
        h = hesse(f, x, eps)
        for i in range(len(h)):
            h[i][i] += lam
        h = inverse_matrix(h)
        # Candidate step: x1 = x - (H + lam*I)^-1 * grad
        x1 = x.copy()
        for i in range(len(x1)):
            for j in range(len(h)):
                x1[i] -= h[i][j] * grad[j]
        if f(x1) < f(x):
            # Step accepted: reduce damping and move
            lam /= 2
            k += 1
            new_grad = True
            x = x1.copy()
        else:
            # Step rejected: increase damping and retry
            lam *= 2
    return x, f(x)

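# A minimal smoke test for marquardt() -- illustrative only, not part of the
# original module. It assumes the helper routines gradient(), hesse(),
# inverse_matrix() and norm() used above are available in scope; the
# quadratic target function below is hypothetical.
def _quadratic(v):
    return (v[0] - 3.0) ** 2 + 2.0 * (v[1] + 1.0) ** 2

x_min, f_min = marquardt([0.0, 0.0], m=200, eps=1e-6, f=_quadratic)
print(x_min, f_min)  # expected: approximately [3.0, -1.0] and 0.0
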
def __new__(self, ODB, CurRec, nfield, DensDatByDenCat, DDBR, QuantileUse,
            n=100, p=None):
    # Note: the first argument to __new__ is actually the class; this
    # method is used as a factory that returns a plain dictionary.
    self.n = n
    self.p = p
    if self.p is None:
        self.p = [(t + .5) / self.n for t in range(self.n)]
    CurValue = {}
    # Values directly from the database
    for i in range(nfield):
        CurValue[ODB.Fname[i]] = CurRec[i]
    # Incorporate estimated abundance and confidence bounds into the dictionary
    SiteMean = CurValue['BedArea'] * 10000         # convert bed area from hectares to square metres
    SiteStDev = CurValue['BedAreaSE'] * 10000
    WeightMean = CurValue['MeanWt'] / 2.204622622  # convert mean weight from pounds to kilograms
    WeightStEr = CurValue['MeanWtSE'] / 2.204622622
    DistWeight = norm(WeightMean, WeightStEr)
    DistArea = LowHalfNormal(SiteMean, SiteStDev)
    DistWeightByArea = ProdDistributions(DistWeight, DistArea, p=self.p)
    # Biomass based on density class and region
    BiomassDC = ProdDistributions(DistWeightByArea, DensDatByDenCat, p=self.p)
    # Biomass based on region only
    BiomassQR = ProdDistributions(DistWeightByArea, DDBR, p=self.p)
    CurValue['CBBiomassDC'] = BiomassDC.isf(QuantileUse)
    CurValue['CBBiomassQR'] = BiomassQR.isf(QuantileUse)
    CurValue['n_DenCat'] = len(DensDatByDenCat)
    CurValue['n_Region'] = len(DDBR)
    return CurValue

def GetMeanTranLen(self):
    '''Return the mean transect length as a normal distribution
    (mean, standard error). Sentinel values of -sys.maxsize flag
    missing data.'''
    TranLen = [t.GetTranLength() for t in self.ListTransects]
    n = len(TranLen)
    if n < 1:
        return norm(-sys.maxsize, -sys.maxsize)
    mu = average(TranLen)
    if n > 1:
        sigma = std(TranLen, ddof=1)  # sample standard deviation
        sterr = sigma / sqrt(n)       # standard error of the mean
    else:
        sigma = -sys.maxsize
        sterr = -sys.maxsize
    return norm(mu, sterr)

def get_radii(self, positions):
    """Set self.radii_matrix and self.distances_matrix to the
    appropriate values to be used in the equation of motion.

    The radius matrix has dimensions [self.how_many, self.how_many, 2]
    because each radius is a vector of length 2.  The distance matrix
    has dimensions [self.how_many, self.how_many] because it is an
    array of scalars.
    """
    rad_mat = np.zeros([self.how_many, self.how_many, 2])
    dist_mat = np.zeros([self.how_many, self.how_many])
    for i in range(self.how_many):
        for j in range(self.how_many):
            if i > j:
                # Avoid excess calculations: the radii are antisymmetric
                # and the distances symmetric, so when the row number is
                # larger than the column number reuse the entry with the
                # transposed index.
                rad_mat[i, j] = -rad_mat[j, i]
                dist_mat[i, j] = dist_mat[j, i]
            elif i == j:
                rad_mat[i, j] = np.zeros([2])
                dist_mat[i, j] = 0.0
            else:
                rad_mat[i, j] = positions[j] - positions[i]
                dist_mat[i, j] = norm(rad_mat[i, j])
    return rad_mat, dist_mat

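# The same matrices can be built without the double loop. A minimal vectorized
# sketch, assuming positions is an (n, 2) NumPy array; the function name and
# broadcasting approach are illustrative, not part of the original class.
import numpy as np

def get_radii_vectorized(positions):
    # Pairwise displacement vectors: rad[i, j] = positions[j] - positions[i]
    rad = positions[None, :, :] - positions[:, None, :]
    # Pairwise Euclidean distances
    dist = np.linalg.norm(rad, axis=2)
    return rad, dist
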
def CalcDigitizedArea(self):
    '''Set self.DigitizedArea, trying each data source in turn.'''
    self.FromSiteAnalysisData()
    if self.DigitizedArea is not None:
        return
    self.FromSiteSummary()
    if self.DigitizedArea is not None:
        return
    # No data available: fall back to a sentinel distribution
    self.DigitizedArea = norm(MinInt, 0.)

def FromSiteAnalysisData(self):
    '''Try to calculate site area from data in the SiteAnalysisData table'''
    query = 'SELECT DISTINCT SiteAnalysisData.DigitizedArea '
    query += 'FROM SiteAnalysisData '
    query += 'WHERE (((SiteAnalysisData.SurveyTitle)="'
    query += self.survey
    query += '") AND ((SiteAnalysisData.Year)= '
    query += str(self.year)
    query += ') AND ((SiteAnalysisData.SurveySite)= '
    query += str(self.site)
    query += '));'
    try:
        self.ODB.execute(query)
        # Convert hectares to square metres
        area = self.ODB.GetVariable('DigitizedArea')[0] * 10000
        self.DigitizedArea = norm(area, area / 10.)
        if area < 0:
            self.DigitizedArea = norm(MinInt * 10000, 0.)
    except Exception:
        self.DigitizedArea = norm(MinInt * 10000, 0.)

def AreaFromLOBF(self):
    '''Estimate site area as mean transect length times line-of-best-fit width.'''
    self.GetLOBF()
    if self.LOBF is None:
        self.LOBFarea = norm(MinInt, 0.)
        return
    MTL = self.GetMeanTranLen()
    try:
        if len(self.ListTransects) > 1:
            self.LOBFarea = norm(MTL.mu * self.LOBF, MTL.sigma * self.LOBF)
            return
    except AttributeError:
        # MTL came back as a (mean, sigma) sequence rather than a
        # distribution object; fall back to indexing.
        print('SiteSize 117')
        print('MTL', MTL)
        print('len(self.ListTransects) ', len(self.ListTransects))
        if len(self.ListTransects) > 1:
            self.LOBFarea = norm(MTL[0] * self.LOBF, MTL[1] * self.LOBF)
            return
    if len(self.ListTransects) > 0:
        self.LOBFarea = norm(MTL.mu * self.LOBF, 0.)
        return
    self.LOBFarea = norm(MinInt, 0.)

def __init__(self, mux=0, sigmax=1, muh=0, sigmah=1, n=100):
    self.mux = mux
    self.sigmax = sigmax
    self.muh = muh
    self.sigmah = sigmah
    self.n = n
    # Midpoints of n equal-probability intervals on (0, 1)
    p = [(t + .5) / self.n for t in range(self.n)]
    normx = norm(self.mux, self.sigmax)
    normh = LowHalfNormal(self.muh, self.sigmah)
    # Equiprobable quantiles of each marginal distribution
    EquiProbx = array(normx.isf(p))
    EquiProbh = array(normh.isf(p))
    # Outer product gives n*n equally likely products
    EquiProb = EquiProbx.reshape(n, 1) * EquiProbh
    self.EquiProb = EquiProb.ravel()  # flatten to a length n*n vector

def __new__(self, ODB, CurRec, nfield, DensDatByDenCat, DDBR, DDBR_pr,
            QuantileUse, n=100, p=None):
    # Note: the first argument to __new__ is actually the class; this
    # method is used as a factory that returns a plain dictionary.
    self.n = n
    self.p = p
    if self.p is None:
        self.p = [(t + .5) / self.n for t in range(self.n)]
    CurValue = {}
    # Values directly from the database
    for i in range(nfield):
        CurValue[ODB.Fname[i]] = CurRec[i]
    # Incorporate estimated abundance and confidence bounds into the dictionary
    SiteMean = CurValue['BedArea'] * 10000         # convert bed area from hectares to square metres
    SiteStDev = CurValue['BedAreaSE'] * 10000
    WeightMean = CurValue['MeanWt'] / 2.204622622  # convert mean weight from pounds to kilograms
    WeightStEr = CurValue['MeanWtSE'] / 2.204622622
    DistWeight = norm(WeightMean, WeightStEr)
    DistArea = LowHalfNormal(SiteMean, SiteStDev)
    DistWeightByArea = ProdDistributions(DistWeight, DistArea, p=self.p)
    # Biomass based on density class and region
    BiomassDC = ProdDistributions(DistWeightByArea, DensDatByDenCat, p=self.p)
    # Biomass based on region only
    BiomassQR = ProdDistributions(DistWeightByArea, DDBR, p=self.p)
    #Biomass_pr_DC = ProdDistributions(DistWeightByArea, DensDatByDenCat_pr, p=self.p)  # based on density class and region
    Biomass_pr_QR = ProdDistributions(DistWeightByArea, DDBR_pr, p=self.p)  # based on region only
    CurValue['CBBiomassDC'] = BiomassDC.isf(QuantileUse)
    CurValue['CBBiomassQR'] = BiomassQR.isf(QuantileUse)
    #CurValue['CBBiomass_prDC'] = Biomass_pr_DC.isf(QuantileUse)
    CurValue['CBBiomass_prQR'] = Biomass_pr_QR.isf(QuantileUse)
    CurValue['n_DenCat'] = len(DensDatByDenCat)
    CurValue['n_Region'] = len(DDBR)
    return CurValue

def FromSiteSummary(self):
    '''Try to calculate site area from data in the SiteSummary table'''
    query = 'SELECT DISTINCT SiteSummary.SurveySiteArea '
    query += 'FROM SiteSummary '
    query += 'WHERE (((SiteSummary.SurveyTitle)="'
    query += self.survey
    query += '") AND ((SiteSummary.Year)= '
    query += str(self.year)
    query += ') AND ((SiteSummary.SurveySite)= '
    query += str(self.site)
    query += '));'
    try:
        self.ODB.execute(query)
        area = self.ODB.GetVariable('SurveySiteArea')
        self.DigitizedArea = norm(sum(area), 0.)
    except Exception:
        self.DigitizedArea = None

def quad_form(x, P):
    """Alias for :math:`x^T P x`."""
    x, P = map(Expression.cast_to_const, (x, P))
    # Check dimensions.
    n = P.size[0]
    if P.size[1] != n or x.size != (n, 1):
        raise Exception("Invalid dimensions for arguments.")
    if x.is_constant():
        return x.T * P * x
    elif P.is_constant():
        # Rewrite x^T P x as sgn * scale * ||M^T x||^2 using a
        # decomposition of P, preserving convexity/concavity.
        np_intf = intf.get_matrix_interface(np.ndarray)
        P = np_intf.const_to_matrix(P.value)
        sgn, scale, M = _decomp_quad(P)
        return sgn * scale * square(norm(Constant(M.T) * x))
    else:
        raise Exception("At least one argument to quad_form must be constant.")

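# A minimal usage sketch for the quad_form atom, assuming the modern cvxpy
# top-level API (cvxpy.quad_form, cvxpy.Variable, cvxpy.Minimize,
# cvxpy.Problem); the data values are illustrative.
import numpy as np
import cvxpy as cp

A = np.random.randn(3, 3)
P = A.T @ A  # positive semidefinite, so quad_form(x, P) is convex
x = cp.Variable(3)

# Minimize x^T P x subject to the entries of x summing to 1.
prob = cp.Problem(cp.Minimize(cp.quad_form(x, P)), [cp.sum(x) == 1])
prob.solve()
print(x.value)
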
def __init__(self, ODB, SurveyTitle, Year, SurveySite=None):
    '''ADOWeightVal(ODB, HeaderValues)
    * ODB is an open geoduck bio-database'''
    self.ODB = ODB
    query = "select "
    query += "SiteAnalysisData.MeanWeight as EstMeanWeight, "
    query += "SiteAnalysisData.MeanWeightSE, "
    query += "SiteAnalysisData.MeanWeightSource from SiteAnalysisData "
    query += self.WhereQuery(SurveyTitle, Year, SurveySite)
    query += ";"
    ODB.execute(query)
    self.EstMeanWeight = ODB.GetVariable('EstMeanWeight')
    self.MeanWeightSE = ODB.GetVariable('MeanWeightSE')
    self.MeanWeightSource = ODB.GetVariable('MeanWeightSource')
    # Unwrap single-element containers returned by the database layer
    if isinstance(self.EstMeanWeight, (list, ndarray)):
        self.EstMeanWeight = self.EstMeanWeight[0]
    if isinstance(self.MeanWeightSE, (list, ndarray)):
        self.MeanWeightSE = self.MeanWeightSE[0]
    if isinstance(self.MeanWeightSource, (list, ndarray)):
        self.MeanWeightSource = self.MeanWeightSource[0]
    # Fall back to sentinel values when fields are missing
    if self.EstMeanWeight is None:
        self.EstMeanWeight = -1.
    if self.MeanWeightSE is None:
        self.MeanWeightSE = 0.
    if self.MeanWeightSource is None or self.MeanWeightSource == '':
        self.MeanWeightSource = 'Unknown'
    self.RandSource = norm(self.EstMeanWeight, self.MeanWeightSE)

# Importing modules
import norm as nm
import smooth as sm
import readfits as rdf
import plot as pt
import write_moog as wtm
import write as wt

# Open and read the FITS file
wavelength, flux = rdf.readfits()

# Normalizing
norm_flux = nm.norm(flux)

# Smoothing
smooth_wavelength, smooth_flux = sm.smooth(wavelength, norm_flux)

# Plotting the data
pt.plot(wavelength, flux, smooth_wavelength, smooth_flux)

# Writing to a text file (for MOOG)
star_name, number_of_variables = wtm.write_moog(smooth_wavelength, smooth_flux)

# Writing to a plain text file (with header)
wt.write_txt(smooth_wavelength, smooth_flux, star_name, number_of_variables,
             wavelength, flux)

# Create a matrix of sliding windows over the given data.
# Matrix dimensions are (LEN - N) x N.
def create_input_matrix(sunspot, n):
    training_in = []
    i = 0
    while i < len(sunspot) - n:
        line = sunspot[i:n + i]
        training_in.append(line)
        i += 1
    return np.array(training_in)

# Read data from file
data = read_data(FILE_PATH)
years, spots = split_data(data)

# Normalise data (see the sketch of norm() below)
norm_spots = norm(spots, 0, 1)

# Split into training and validation sets
train_spots = norm_spots[:200]
valid_spots = norm_spots[200:]

# Convert data to matrices, ready to work with
training_out = np.array(train_spots[N:]).reshape(-1, 1)
training_in = create_input_matrix(train_spots, N)

# Learn
neur_net = NeuralNetwork(N)
neur_net.train(training_in, training_out, 100000, 0.0001)
print("Weights after training:")
print(neur_net.synaptic_weights)

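# The norm() helper imported by this script is not shown. A minimal sketch of
# what norm(spots, 0, 1) presumably does, assuming plain min-max scaling onto
# the interval [lo, hi]; this implementation is hypothetical.
import numpy as np

def norm(values, lo, hi):
    # Map values linearly onto the interval [lo, hi]
    values = np.asarray(values, dtype=float)
    vmin, vmax = values.min(), values.max()
    return lo + (values - vmin) * (hi - lo) / (vmax - vmin)
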
        try:
            self.value = mquantiles(equiProb, self.p)
        except Exception:
            self.value = [-1 for _ in self.p]

    def isf(self, p=None, n=None):
        '''Return quantiles of the product distribution.  With no
        arguments return the stored values; with n, evaluate at the
        midpoints of n equal-probability intervals.'''
        if p is None:
            if n is None:
                return self.value
            p = [(t + .5) / n for t in range(n)]
        if self.value == []:
            return [-1 for _ in p]
        return mquantiles(self.value, p)


if __name__ == "__main__":
    from norm import norm
    from LowHalfNormal import LowHalfNormal

    p = [.0005, .005, .05, .15, .25, .35, .45, .55, .65, .75, .85,
         .95, .995, .9995]
    test1 = LowHalfNormal(10, 1)
    test2 = norm(20, 2)
    test3 = array(range(10))
    test4 = ProdDistributions(test1, test2)
    print('test4.isf(n=10) ', test4.isf(n=10))
    test5 = ProdDistributions(test4, test3)
    print('test5.isf(n=10) ', test5.isf(n=10))

def eom(W):
    '''Equation of motion: update every molecule position in place.'''
    w = W.reshape([self.how_many, 2])
    radii_matrix, distances_matrix = self.get_radii(w)
    dist_max2 = simulation_values.attract_upper
    dist_min2 = simulation_values.attract_lower
    for i in range(self.how_many):
        rx = 0.
        ry = 0.
        r0 = False
        if self.mol_all[i].t == mol_type.OKT3:
            r0 = True
        else:
            for k in range(self.how_many):
                if i == k:
                    pass
                elif distances_matrix[i, k] > dist_max2:
                    pass
                # For when there's only one OKT3
                elif ((simulation_values.ok_count == 1) and
                      (self.mol_all[i].t == mol_type.ACTIN) and
                      (self.mol_all[k].t == mol_type.OKT3) and
                      (distances_matrix[i, k] > simulation_values.cortex_rad)):
                    r0 = True
                    self.mol_all[i].age = self.mol_all[i].age * .1
                    break
                elif ((self.mol_all[i].t == mol_type.ACTIN) and
                      (self.mol_all[k].t == mol_type.ACTIN) and
                      (distances_matrix[i, k] < simulation_values.repulsion_radius_act)):
                    # Actin-actin repulsion
                    dm = distances_matrix[i, k]
                    rx += -radii_matrix[i, k][0] / (dm / simulation_values.repuls_factor)
                    ry += -radii_matrix[i, k][1] / (dm / simulation_values.repuls_factor)
                elif ((self.mol_all[i].t == mol_type.MYOSIN) and
                      (self.mol_all[k].t == mol_type.MYOSIN) and
                      (distances_matrix[i, k] < simulation_values.repulsion_radius_myo)):
                    # Myosin-myosin repulsion
                    dm = distances_matrix[i, k]
                    rx += -radii_matrix[i, k][0] / (dm / simulation_values.repuls_factor)
                    ry += -radii_matrix[i, k][1] / (dm / simulation_values.repuls_factor)
                elif distances_matrix[i, k] < dist_min2:
                    pass
                elif ((self.mol_all[i].t == mol_type.MYOSIN) and
                      (self.mol_all[k].t == mol_type.ACTIN)):
                    # Myosin-actin attraction (unit vector toward k)
                    dm = distances_matrix[i, k]
                    rx += radii_matrix[i, k][0] / dm
                    ry += radii_matrix[i, k][1] / dm
                elif ((self.mol_all[i].t == mol_type.ACTIN) and
                      (self.mol_all[k].t == mol_type.MYOSIN)):
                    # Actin-myosin attraction (unit vector toward k)
                    dm = distances_matrix[i, k]
                    rx += radii_matrix[i, k][0] / dm
                    ry += radii_matrix[i, k][1] / dm
        if (rx != 0) or (ry != 0):
            # Compress net directions with magnitude above 1
            # (the new magnitude is sqrt(rm))
            rm = norm(np.array([rx, ry]))
            if rm > 1.:
                rx = rx / math.sqrt(rm)
                ry = ry / math.sqrt(rm)
        if r0:
            vx = 0.
            vy = 0.
        if self.mol_all[i].t == mol_type.ACTIN:
            vx = rx * dv_act
            vy = ry * dv_act
        if self.mol_all[i].t == mol_type.MYOSIN:
            vx = rx * dv_myo
            vy = ry * dv_myo
        self.mol_all[i].set_position([self.mol_all[i].pos[0] + vx,
                                      self.mol_all[i].pos[1] + vy])