def objFunc(par):
    # Set up the double-well potential and its gradient for the trial parameter
    def f(x):
        return -(-x**4 + 2 * par * x**2)

    def fgrad(x):
        return -(4 * x * (par - x**2))

    U = potential(f, fgrad)
    H = Hamiltonian(U, lambda x: D2)

    # No separatrix correction is applied while fitting the potential parameter
    eps = 0.0

    def func(x, p):
        return eps * (p - H.seperatrix(x))**2

    H.set_add_term(func)

    Pst = None
    rootPar = np.sqrt(par)
    # xRep = [-0.63, 0.63]
    xRep = [z[20], z[-20]]

    try:
        J = HJacobi_integrator(H, Pst)

        # Transition PDFs started from the two replica points
        pT1 = pTransition(J)
        pT1.make(xRep[0], tt)
        pT2 = pTransition(J)
        pT2.make(xRep[1], tt)

        xx = J.xx
        """
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.plot(xx, pT1(xx))
        ax.plot(xx, pT2(xx))
        """

        # Accumulate the log-likelihood, blending the two replica PDFs for
        # starting points that lie between the replica locations.  The weight
        # is chosen so the blend reduces to pT1 at xRep[0] and pT2 at xRep[1],
        # matching the two outer branches.
        val = 0.
        for i in range(X.size - 1):
            x = X[i]
            xT = X[i + 1]
            if x < xRep[0]:
                val += np.log(pT1(xT))
            elif x > xRep[1]:
                val += np.log(pT2(xT))
            else:
                w = abs(x - xRep[0]) / (xRep[1] - xRep[0])
                val += np.log((1 - w) * pT1(xT) + w * pT2(xT))
        return -val
    except Exception:
        return np.inf
def evalLL(self, HObj, Pst):
    # Negative log-likelihood of the trajectory: build a fresh transition PDF
    # from every observed point and evaluate it at the following point
    try:
        val = 0.
        for i in range(self.data.shape[0] - 1):
            J = HJacobi_integrator(HObj, Pst)
            pT = pTransition(J)
            pT.make(self.data[i], self.tt)
            val += -np.log(pT(self.data[i + 1]))
        return val
    except Exception:
        return np.inf
def evalLL_wrep(self, HObj, Pst):
    # Replica variant: build one transition PDF per representative point i and
    # reuse it for every data index j assigned to that replica
    try:
        val = 0.
        for i, R in zip(self.repData.inds, self.repData.Ireps):
            # Make the transition PDF for replica point i
            J = HJacobi_integrator(HObj, Pst)
            pT = pTransition(J)
            pT.make(self.data[i], self.tt)
            for j in R:
                if j != (self.data.shape[0] - 1):
                    val += np.log(pT(self.data[j + 1]))
        return -val
    except Exception:
        return np.inf
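# Sketch of one possible layout for self.repData as assumed by evalLL_wrep
# above: `inds` holds the indices of the representative (replica) points and
# `Ireps` the data indices assigned to each of them.  The nearest-replica
# assignment below is only an illustrative guess, not the original scheme.
class repDataSketch:
    def __init__(self, data, repInds):
        self.inds = list(repInds)
        self.Ireps = [[] for _ in repInds]
        reps = data[np.array(repInds)]
        for j, x in enumerate(data):
            # assign each data point to the closest replica point
            k = int(np.argmin(np.abs(reps - x)))
            self.Ireps[k].append(j)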
def ObjFunc(par):
    # Create a new add_term for the Hamiltonian object with weight par
    def func(x, p):
        return par * (p - H.seperatrix(x))**2

    H.set_add_term(func)

    # Make the transition PDF and evaluate it at the simulated end points
    try:
        J = HJacobi_integrator(H, Pst)
        pT_1 = pTransition(J)
        pT_1.make(x0, tt)
        l = np.log(pT_1(R[:, -1]))
        return -np.sum(l)
    except Exception:
        return np.inf
def objFunc(par):
    # Set up the double-well potential and its gradient for the trial parameter
    def f(x):
        return -(-x**4 + 2 * par * x**2)

    def fgrad(x):
        return -(4 * x * (par - x**2))

    U = potential(f, fgrad)
    H = Hamiltonian(U, lambda x: D2)
    Pst = None

    # Negative log-likelihood of the observed trajectory X
    try:
        val = 0.
        for i in range(X.size - 1):
            J = HJacobi_integrator(H, Pst)
            pT = pTransition(J)
            pT.make(X[i], tt)
            val += -np.log(pT(X[i + 1]))
        return val
    except Exception:
        return np.inf
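# Sketch: the potential parameter could be fitted by minimising objFunc with
# the same Nelder-Mead call used for ObjFunc below; the starting guess of 0.4
# is an arbitrary assumption.
from scipy.optimize import minimize

resPar = minimize(objFunc, 0.4, method='Nelder-Mead')
print resPar.x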
res = minimize(ObjFunc, 0.1, method='Nelder-Mead')
print res

# class ares:
#     def __init__(self):
#         self.x = 0.07539063
# res = ares()

# Rebuild the transition PDF with the fitted separatrix weight res.x
def func(x, p):
    return res.x * (p - H.seperatrix(x))**2

H.set_add_term(func)

J = HJacobi_integrator(H, Pst)
pT = pTransition(J)
pT.make(x0, tt)

xx = np.linspace(-1.5, 1.5, 500)
fig2 = plt.figure()
ax = fig2.add_subplot(111)
ax.plot(xx, pT(xx))

# Fit a three-component Gaussian mixture to the simulated end points
from sklearn import mixture

clf = mixture.GaussianMixture(n_components=3, covariance_type='full')
clf.fit(R[:, -1].reshape(NSim, 1))
p = np.zeros(xx.size)
print clf.means_
print clf.covariances_
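# Sketch: presumably the array p above is meant to hold the fitted mixture
# density on the grid xx; GaussianMixture.score_samples returns per-sample log
# densities, so exponentiating gives the PDF to overlay on the transition PDF.
p = np.exp(clf.score_samples(xx.reshape(-1, 1)))
ax.plot(xx, p, 'g--')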
J = HJacobi_integrator(H, Pst)

from scipy.integrate import odeint
from scipy.misc import derivative

x0 = 0.1
print derivative(lambda x: fgrad(x), x0=x0, dx=1e-6)

# Follow the deterministic drift to locate the mode of the transition PDF
def dXdt(X, t=0):
    v = -fgrad(X[0])
    return np.array([v])

mode = odeint(dXdt, x0, np.linspace(0., T, 1000))
print "The mode is: ", mode[-1], np.sqrt(par)

# Compare the two transition-PDF constructions on the same grid
pT1 = pTransition(J)
pT1.make(x0, tt)

intPar = integrator_pars(delta=[0.3, 0.3], scale=1.5, dxTarget=0.01)
pT2 = pTransition2(H, intPar)
pT2.make(x0, T)

xx = np.linspace(-1.5, 1.5, 1000)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(xx, pT1(xx), 'b-')
ax.plot(xx, pT2(xx), 'r-')
plt.show()
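# Note: scipy.misc.derivative is deprecated and removed in recent SciPy
# releases; a plain central difference (sketch below) reproduces the same
# gradient check without that import.
def central_diff(g, x, dx=1e-6):
    return (g(x + dx) - g(x - dx)) / (2.0 * dx)

print central_diff(fgrad, x0)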
H = Hamiltonian(U, lambda x: 0.5)
Pst = pStationary(H)
J = HJacobi_integrator(H, Pst)
Jt_1 = J(tt, J0f)

from JIntegrator import integrator_pars

xx = J.xx
fig = plt.figure()
ax = fig.add_subplot(111)

# Transition PDFs from the two constructions
pT_1 = pTransition(J)
pT_1.make(x0, tt)

intPar = integrator_pars(delta=[0.3, 0.3], scale=1.25, dxTarget=0.005)
pT_2 = pTransition2(H, intPar)
pT_2.make(x0, T)

# ax.plot(xx, pT_1(xx), 'b-')
ax.plot(xx, pT_2(xx))

# Scan a range of separatrix weights eps
for eps in [0.03, 0.04, 0.05, 0.06]:
    def func(x, p):
        return eps * (p - H.seperatrix(x))**2

    print eps
    try:
        H.set_add_term(func)