def optimize_Cheb(self, *args):
    """Optimize the Chebyshev parameters while keeping the current Theta
    parameters fixed and assuming white noise.

    Side effects: updates ``self.chebyshevSpectrum`` during the search,
    records the fix_c0 choice on ``self``, and writes the optimized
    parameters to disk via ``PhiParam.save()``.
    """
    # When c0 is held fixed, it is excluded from the optimization vector.
    if self.chebyshevSpectrum.fix_c0:
        self.fix_c0 = True
        start = np.zeros((self.npoly - 1))
    else:
        self.fix_c0 = False
        start = np.zeros((self.npoly))

    def objective(coeffs):
        # Negative log-probability for the minimizer; a -inf lnp is mapped
        # to a large finite penalty so fmin can keep searching.
        self.chebyshevSpectrum.update(coeffs)
        lnp = self.evaluate()
        print(self.order, coeffs, lnp)
        return 1e99 if lnp == -np.inf else -lnp

    from scipy.optimize import fmin
    best = fmin(objective, start, maxiter=10000, maxfun=10000)
    print(self.order, best)

    # Due to a JSON bug, np.int64 type objects will get read twice,
    # and cause this routine to fail. Therefore we have to be careful
    # to convert these to ints.
    phi = PhiParam(spectrum_id=int(self.spectrum_id), order=int(self.order),
                   fix_c0=self.chebyshevSpectrum.fix_c0, cheb=best)
    phi.save()
def optimize_Cheb(self, *args):
    """Keeping the current Theta parameters fixed and assuming white
    noise, optimize the Chebyshev parameters.

    Side effects: updates ``self.chebyshevSpectrum`` during the search,
    may clear ``self.chebyshevSpectrum.fix_c0`` for single-order data,
    and writes the optimized parameters to disk via ``PhiParam.save()``.
    """
    # self.fix_c0 = True if index == (len(DataSpectrum.wls) - 1) else False #Fix the last c0
    # This is necessary if we want to update just a single order.
    #
    # BUG FIX: the original condition used bitwise `&`, which binds more
    # tightly than `>`, so it evaluated `(fix_c0 & len(wls)) > 1` -- a
    # bit-AND of a bool with the order count (e.g. True & 2 == 0), not
    # the intended conjunction. Use logical `and` so c0 stays fixed
    # exactly when requested AND more than one order is present.
    if self.chebyshevSpectrum.fix_c0 and len(self.dataSpectrum.wls) > 1:
        p0 = np.zeros((self.npoly - 1))
    else:
        self.chebyshevSpectrum.fix_c0 = False
        p0 = np.zeros((self.npoly))

    def fprob(p):
        # Negative log-probability for fmin; -inf maps to a large finite
        # penalty so the simplex search can continue.
        self.chebyshevSpectrum.update(p)
        lnp = self.evaluate()
        print(self.order, p, lnp)
        if lnp == -np.inf:
            return 1e99
        else:
            return -lnp

    from scipy.optimize import fmin
    result = fmin(fprob, p0, maxiter=10000, maxfun=10000)
    print(self.order, result)

    # Due to a JSON bug, np.int64 type objects will get read twice,
    # and cause this routine to fail. Therefore we have to be careful
    # to convert these to ints.
    phi = PhiParam(spectrum_id=int(self.spectrum_id), order=int(self.order),
                   fix_c0=self.chebyshevSpectrum.fix_c0, cheb=result)
    phi.save()
class TestPhiParam:
    """Round-trip tests for PhiParam JSON serialization."""

    def setup_class(self):
        # One fully-populated parameter set shared by the tests below.
        self.phiparam = PhiParam(
            spectrum_id=0,
            order=22,
            fix_c0=True,
            cheb=np.zeros((4,)),
            sigAmp=1.0,
            logAmp=-5.0,
            l=20.,
            regions=np.ones((4, 3)),
        )

    def test_save(self):
        # Serialize the fixture to phi_test.json in the working directory.
        self.phiparam.save(fname="phi_test.json")

    def test_load(self):
        # Reload the saved file and echo each field for inspection.
        load = PhiParam.load(Starfish.specfmt.format(0, 22) + "phi_test.json")
        for field in ("spectrum_id", "order", "fix_c0", "cheb",
                      "sigAmp", "logAmp", "l", "regions"):
            print(getattr(load, field))
class TestPhiParam:
    """Tests for PhiParam serialization (save to / load from JSON)."""

    def setup_class(self):
        """Create a fully-populated PhiParam fixture shared by the tests."""
        self.phiparam = PhiParam(spectrum_id=0, order=22, fix_c0=True,
                                 cheb=np.zeros((4, )), sigAmp=1.0,
                                 logAmp=-5.0, l=20.,
                                 regions=np.ones((4, 3)))

    def test_save(self):
        """Write the fixture to phi_test.json."""
        self.phiparam.save(fname="phi_test.json")

    def test_load(self):
        """Reload the saved file and print every field for inspection."""
        # NOTE(review): depends on test_save having run first, and assumes
        # save() prefixes the filename with Starfish.specfmt.format(0, 22)
        # -- confirm against PhiParam.save's implementation.
        load = PhiParam.load(Starfish.specfmt.format(0, 22) + "phi_test.json")
        print(load.spectrum_id)
        print(load.order)
        print(load.fix_c0)
        print(load.cheb)
        print(load.sigAmp)
        print(load.logAmp)
        print(load.l)
        print(load.regions)
# Create one PhiParam per (spectrum, order) pair and write it to disk.
for spec_id in range(len(Starfish.data["files"])):
    for i, order in enumerate(Starfish.data["orders"]):
        # Fix c0 only for the last order. (`i_last` is presumably
        # len(Starfish.data["orders"]) - 1, defined above this chunk --
        # TODO confirm.) Simplified from the redundant
        # `True if ... else False` ternary.
        fix_c0 = (i == i_last)
        if fix_c0:
            # One fewer free coefficient when c0 is pinned.
            cheb = np.zeros((Starfish.config["cheb_degree"] - 1, ))
        else:
            cheb = np.zeros((Starfish.config["cheb_degree"], ))

        # For each order, create a Phi with these values
        # Automatically reads all of the Phi parameters from config.yaml
        phi = PhiParam(spectrum_id=spec_id, order=int(order),
                       fix_c0=fix_c0, cheb=cheb)

        # Write to CWD using predetermined format string
        phi.save()

if args.optimize == "Cheb":
    model = parallel.OptimizeCheb(debug=True)

    # Now that the different processes have been forked, initialize them
    pconns, cconns, ps = parallel.initialize(model)

    # Initialize to the basics
    pars = ThetaParam.from_dict(Starfish.config["Theta"])

    # Distribute the calculation to each process
    for ((spectrum_id, order_id), pconn) in pconns.items():
        # Parse the parameters into what needs to be sent to each Model here.
        pconn.send(("LNPROB", pars))
# Figure out how many models and orders we have
i_last = len(Starfish.data["orders"]) - 1

# Create one PhiParam per (spectrum, order) pair and write it to disk.
for spec_id in range(len(Starfish.data["files"])):
    for i, order in enumerate(Starfish.data["orders"]):
        # Fix c0 only for the final order of each spectrum. Simplified
        # from the redundant `True if ... else False` ternary.
        fix_c0 = (i == i_last)
        if fix_c0:
            # One fewer free coefficient when c0 is pinned.
            cheb = np.zeros((Starfish.config["cheb_degree"] - 1,))
        else:
            cheb = np.zeros((Starfish.config["cheb_degree"],))

        # For each order, create a Phi with these values
        # Automatically reads all of the Phi parameters from config.yaml
        phi = PhiParam(spectrum_id=spec_id, order=int(order),
                       fix_c0=fix_c0, cheb=cheb)

        # Write to CWD using predetermined format string
        phi.save()

if args.optimize == "Cheb":
    # NOTE(review): this "Cheb" branch instantiates OptimizeTheta, while
    # the parallel-module variant of this script uses OptimizeCheb under
    # the same condition -- looks like the wrong optimizer class; confirm
    # whether parallel_linear exposes OptimizeCheb before changing.
    model = parallel_linear.OptimizeTheta(debug=True)

    # Now that the different processes have been forked, initialize them
    pconns, cconns, ps = parallel_linear.initialize(model)

    # Initialize to the basics
    pars = ThetaParam.from_dict(Starfish.config["Theta"])

    # Distribute the calculation to each process
    for ((spectrum_id, order_id), pconn) in pconns.items():
        # Parse the parameters into what needs to be sent to each Model here.
        pconn.send(("LNPROB", pars))