def optimize_Cheb(self, *args):
    '''
    Keeping the current Theta parameters fixed and assuming white noise,
    optimize the Chebyshev parameters.
    '''
    # Mirror the spectrum's fix_c0 flag onto the model and size the initial
    # guess accordingly (one fewer coefficient when c0 is held fixed).
    fixed = self.chebyshevSpectrum.fix_c0
    self.fix_c0 = fixed
    p0 = np.zeros((self.npoly - 1) if fixed else (self.npoly))

    def fprob(p):
        # Objective for the minimizer: negative log-posterior, with -inf
        # mapped to a large finite penalty so fmin can keep going.
        self.chebyshevSpectrum.update(p)
        lnp = self.evaluate()
        print(self.order, p, lnp)
        return 1e99 if lnp == -np.inf else -lnp

    from scipy.optimize import fmin
    result = fmin(fprob, p0, maxiter=10000, maxfun=10000)
    print(self.order, result)

    # Due to a JSON bug, np.int64 type objects will get read twice,
    # and cause this routine to fail. Therefore we have to be careful
    # to convert these to ints.
    phi = PhiParam(spectrum_id=int(self.spectrum_id),
                   order=int(self.order),
                   fix_c0=self.chebyshevSpectrum.fix_c0,
                   cheb=result)
    phi.save()
def lnfunc(p):
    """Log-posterior of the nuisance (Phi) parameters packed in the flat vector p.

    In this variant the Chebyshev correction is deliberately disabled: the
    coefficients are pinned to zero and p carries only [sigAmp, logAmp, l].
    (The original body computed ``ind = self.npoly`` and then immediately
    overwrote it with ``ind = 0`` alongside commented-out slicing code; that
    dead code is removed here without changing behavior.)
    """
    # Chebyshev turned off: coefficients fixed at zero, so the parameter
    # vector holds only the three GP hyperparameters.
    cheb = [0, 0, 0]
    sigAmp, logAmp, l = p[0], p[1], p[2]
    par = PhiParam(self.spectrum_id, self.order, self.chebyshevSpectrum.fix_c0,
                   cheb, sigAmp, logAmp, l)
    self.update_Phi(par)
    # sigAmp must be positive (this is effectively a prior)
    # See https://github.com/iancze/Starfish/issues/26
    if not (0.0 < sigAmp):
        # Same bookkeeping as self.evaluate() performs: remember the previous
        # lnprob and record the rejection as -inf.
        self.lnprob_last = self.lnprob
        lnp = -np.inf
        self.logger.debug("sigAmp was negative, returning -np.inf")
        self.lnprob = lnp
    else:
        lnp = self.evaluate()
        self.logger.debug("Evaluated Phi parameters: {} {}".format(par, lnp))
    return lnp
def lnprob_all(p):
    """Apply the flat parameter vector p to the model, then draw and save a realization."""
    theta = ThetaParam(grid=p[0:2], vz=p[2], vsini=p[3], logOmega=p[4])
    model.update_Theta(theta)
    # hard code npoly=3 (for fixc0 = True with npoly=4)
    phi = PhiParam(0, 0, True, p[5:8], p[8], p[9], p[10])
    model.update_Phi(phi)
    return model.draw_save()
def setup_class(self):
    """Build the reference PhiParam fixture shared by the tests in this class."""
    # Four zeroed Chebyshev coefficients and a 4x3 block of region parameters.
    cheb_coeffs = np.zeros((4, ))
    region_params = np.ones((4, 3))
    self.phiparam = PhiParam(spectrum_id=0, order=22, fix_c0=True,
                             cheb=cheb_coeffs, sigAmp=1.0, logAmp=-5.0,
                             l=20., regions=region_params)
def lnlike(p):
    """Log-likelihood of the flat stellar-parameter vector p; -inf on a model error."""
    try:
        model.update_Theta(ThetaParam(grid=p[0:2], vz=p[2],
                                      vsini=p[3], logOmega=p[4]))
        # hard code npoly=3 (for fixc0 = True with chebyshev polynomials turned off)
        model.update_Phi(PhiParam(0, 0, True, [0.0, 0.0, 0.0],
                                  p[5], p[6], p[7]))
        return model.evaluate()
    except C.ModelError:
        model.logger.debug("ModelError in stellar parameters, sending back -np.inf {}".format(p))
        return -np.inf
def lnfunc(p):
    """Unpack the flat vector p into a PhiParam, apply it, and return the log-posterior."""
    # Number of Chebyshev coefficients carried in p: one fewer when c0 is fixed.
    n_cheb = self.npoly - 1 if self.chebyshevSpectrum.fix_c0 else self.npoly
    cheb = p[0:n_cheb]
    # The GP hyperparameters follow immediately after the Chebyshev block.
    sigAmp, logAmp, l = p[n_cheb], p[n_cheb + 1], p[n_cheb + 2]
    phi = PhiParam(self.spectrum_id, self.order, self.chebyshevSpectrum.fix_c0,
                   cheb, sigAmp, logAmp, l)
    self.update_Phi(phi)
    lnp = self.evaluate()
    self.logger.debug("Evaluated Phi parameters: {} {}".format(phi, lnp))
    return lnp
def optimize_Cheb(self, *args):
    '''
    Keeping the current Theta parameters fixed and assuming white noise,
    optimize the Chebyshev parameters.
    '''
    # self.fix_c0 = True if index == (len(DataSpectrum.wls) - 1) else False #Fix the last c0
    # This is necessary if we want to update just a single order.
    #
    # BUG FIX: the original condition was
    #     self.chebyshevSpectrum.fix_c0 & len(self.dataSpectrum.wls) > 1
    # Bitwise `&` binds tighter than `>`, so it evaluated
    # (fix_c0 & len(wls)) > 1, which is nearly always False (True & n is 0
    # or 1). The intended logical conjunction uses `and`.
    if self.chebyshevSpectrum.fix_c0 and len(self.dataSpectrum.wls) > 1:
        p0 = np.zeros((self.npoly - 1))
    else:
        self.chebyshevSpectrum.fix_c0 = False
        p0 = np.zeros((self.npoly))

    def fprob(p):
        # Objective for fmin: negative log-posterior, with -inf mapped to a
        # large finite penalty so the simplex search can continue.
        self.chebyshevSpectrum.update(p)
        lnp = self.evaluate()
        print(self.order, p, lnp)
        if lnp == -np.inf:
            return 1e99
        else:
            return -lnp

    from scipy.optimize import fmin
    result = fmin(fprob, p0, maxiter=10000, maxfun=10000)
    print(self.order, result)

    # Due to a JSON bug, np.int64 type objects will get read twice,
    # and cause this routine to fail. Therefore we have to be careful
    # to convert these to ints.
    phi = PhiParam(spectrum_id=int(self.spectrum_id),
                   order=int(self.order),
                   fix_c0=self.chebyshevSpectrum.fix_c0,
                   cheb=result)
    phi.save()
# Script-level setup: create per-order Phi parameter files, then (optionally)
# fork worker processes to optimize the Chebyshev coefficients.
# NOTE(review): relies on `args`, `Starfish`, `parallel`, `PhiParam`, and
# `ThetaParam` being defined earlier in this file — confirm when viewing the
# full module.
if args.initPhi:
    # Figure out how many models and orders we have
    i_last = len(Starfish.data["orders"]) - 1
    for spec_id in range(len(Starfish.data["files"])):
        for i, order in enumerate(Starfish.data["orders"]):
            # Only the last order pins c0; presumably this anchors the overall
            # flux normalization — TODO confirm against the Chebyshev model.
            fix_c0 = True if i == i_last else False
            if fix_c0:
                # c0 is fixed, so one fewer free coefficient is stored.
                cheb = np.zeros((Starfish.config["cheb_degree"] - 1, ))
            else:
                cheb = np.zeros((Starfish.config["cheb_degree"], ))
            # For each order, create a Phi with these values
            # Automatically reads all of the Phi parameters from config.yaml
            phi = PhiParam(spectrum_id=spec_id, order=int(order), fix_c0=fix_c0, cheb=cheb)
            # Write to CWD using predetermined format string
            phi.save()

if args.optimize == "Cheb":
    model = parallel.OptimizeCheb(debug=True)

# Now that the different processes have been forked, initialize them
pconns, cconns, ps = parallel.initialize(model)

# Initialize to the basics
pars = ThetaParam.from_dict(Starfish.config["Theta"])

#Distribute the calculation to each process