Example #1
import numpy as np
import pandas as pd
import theano
import theano.tensor as T

def singleLocusSigmoid():
    # Deterministic single-locus trajectory: x(t) = sigmoid(s*t/2 + logit(x0)).
    x0 = 0.001; s = 0.5; maxGen = 100
    times_ = T.ivector("times"); x0_ = T.scalar("x0")
    S__ = theano.shared(np.asarray(s, dtype=theano.config.floatX), 'S')
    pred_ = sig_(0.5*S__*times_ + logit_(x0_))          # sig_/logit_: symbolic helpers, sketched below
    Feedforward_ = theano.function(inputs=[x0_, times_], outputs=pred_)
    pd.Series(Feedforward_(x0, np.arange(maxGen, dtype=np.int32))).plot()
    pd.Series(sig(0.5*s*np.arange(maxGen) + logit(x0))).plot()   # NumPy closed form should match the Theano output
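# The snippet above assumes sigmoid/logit helpers that are not defined here. A minimal sketch of what
# they would look like, following the usage above (the actual popgen utilities may differ):
def sig(x):    return 1./(1 + np.exp(-x))           # NumPy sigmoid
def logit(x):  return np.log(x) - np.log(1 - x)     # NumPy logit, inverse of sig
def sig_(x):   return T.nnet.sigmoid(x)             # symbolic (Theano) sigmoid
def logit_(x): return T.log(x) - T.log(1 - x)       # symbolic (Theano) logit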
Example #2
def singleMultiLocusHafVariableTime():
    from popgen.TimeSeries.RNN.Utils import Z
    x0 = 0.001; s = 0.5; maxGen = 100; numReplicates = 3
    times = np.tile(np.arange(maxGen+1), (numReplicates,1)).T.astype(np.float32)   # (generations+1) x replicates
    initC0 = np.arange(numReplicates, dtype=np.float32)*logit(x0)   # per-replicate offset r*logit(x0)
    c__ = theano.shared(initC0, 'c')
#     c__=theano.shared(np.asarray(logit(x0), dtype = theano.config.floatX), 'S')
    times_ = T.fmatrix(); rep_ = T.iscalar(); maxGenerations_ = T.ivector()
    target_ = (T.matrix(), T.vector())[numReplicates==1]    # matrix target unless there is a single replicate
    S__ = theano.shared(np.asarray(s, dtype=np.float32), 'S')
    # Per-replicate squared error between the target and Z(.) of the logistic trajectory, restricted to
    # the generations observed for that replicate (n=200, theta=20 are hard-coded here).
    loss_ = (target_[:maxGenerations_[rep_], rep_] - Z(T.nnet.sigmoid(0.5*S__*times_[:maxGenerations_[rep_], rep_] + c__[rep_]), 200, 20))**2
    reps_ = T.ivector()
    # Toy cost that scans over replicates, each truncated at its own last generation.
    cost_, _ = theano.scan(lambda rep: (target_[:maxGenerations_[rep], rep] - times_[:maxGenerations_[rep], rep]).sum(), sequences=reps_); cost_ = cost_.sum()

    Loss_ = theano.function(inputs=[target_, times_, maxGenerations_, reps_], outputs=cost_)
    
    # Build a toy target offset from the generation times, with different offsets per replicate.
    target = times + 1
    target[:,1] += 1
    target[:,2] += 1
    lastGenerationIndex = [10, 20, 30]      # last usable generation index per replicate
    reps = range(numReplicates)
    Loss_(target, times, lastGenerationIndex, reps)
    import popgen.TimeSeries.RNN.MultiLocusHAFOptimizingAllVarsVariableTimeOld as RNN

    # Forward-simulate replicates under selection (Simulation is assumed to come from the popgen package).
    numReplicates = 3; s = 0.02; generationStep = 100
    sim = Simulation.Simulation(numReplicates=numReplicates, s=s, generationStep=generationStep)
    sim.forwardSimulation()
    sim.getAverageHAF().plot()
    sim.getAverageHAF().diff().plot()
    y=sim.getAverageHAF();times=sim.getGenerationTimes()
    replicateIndex=range(sim.numReplicates)
    reload(RNN)
    n=2000;theta=20
    lastGenerationIndex=sim.filterTimeSamplesWithHighNegDer()
    rnn=RNN.MultiLocusHAFOptimizingAllVarsVariableTimeOld(sim.X0.min(),lastGenerationIndex=lastGenerationIndex, initS=sim.s,  initTheta=theta, times=sim.getGenerationTimes(), numReplicates=sim.numReplicates,initSviaLineSearch=False)
    rnn.Loss_(y.values,np.tile(times,(3,1)).T.astype(np.float32),n,list(lastGenerationIndex),replicateIndex)
    Z(sig(0.5*times*sim.s + logit(sim.X0.min())),n,theta)
    # Per-replicate squared error of the expected trajectory against the observed average HAF,
    # truncated at each replicate's last usable generation.
    for j, i in enumerate(lastGenerationIndex):
        print ((Z(sig(0.5*times*sim.s + logit(sim.X0.min())), n, theta)[:i] - y.values[:i, j])**2).sum()
    print lastGenerationIndex
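# For reference, the loss this example builds in Theano can be written directly in NumPy. This is a
# sketch under assumptions: Z (from popgen.TimeSeries.RNN.Utils) is taken to map a frequency trajectory
# to an expected summary statistic, and sig/logit are the helpers sketched after Example #1.
from popgen.TimeSeries.RNN.Utils import Z

def variableTimeLoss(target, times, lastGenerationIndex, s, c, n, theta):
    # target, times: (generations x replicates) arrays; c[r] is the logit of replicate r's initial frequency.
    total = 0.
    for r, last in enumerate(lastGenerationIndex):
        pred = Z(sig(0.5*s*times[:last, r] + c[r]), n, theta)   # expected statistic up to the last usable generation
        total += ((target[:last, r] - pred)**2).sum()           # squared error for replicate r
    return total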
Example #3
def hardSweep():
    # Load simulation parameters (Simulation and Estimate are assumed to be imported from the popgen package).
    param = pd.read_pickle('/home/arya/out/vineet/param0.1_1.pd')
    reload(Simulation)
    s = 0.01
    X0 = param['initHaps'].mean(0)                                    # initial allele frequencies
    param['siteUnderSelection'] = param['initHaps'].mean(0).argmin()  # place selection on the rarest allele
    param['s']=s
    param['numReplicates']=3
    param['maxGeneration']=500
    param['generationStep']=100
    param['startGeneration']=0
    X = Simulation.Simulation.forwardSimulationHardSweep(param)
    def sig(x): return 1./(1+np.exp(-x))
    if not param['startGeneration']: 
        x0=X0[param['siteUnderSelection']]
    else:
        x0=X[param['startGeneration']/param['generationStep'] -1,param['siteUnderSelection'],:].mean()
    x0
    c = np.log(1-x0) - np.log(x0)   # c = -logit(x0)
    z = sig(s*np.array(range(param['startGeneration'], param['maxGeneration']+1, param['generationStep'])[1:])/2. - c)   # expected logistic trajectory at the sampled generations
    pd.DataFrame(np.append(X[:,3,:],z[:,None],axis=1)).plot()
    y= np.array([[np.linalg.norm(X[t,:,r])**2 for t in range(X.shape[0])] for r in range(param['numReplicates'])]).T
    
    y0=np.linalg.norm(X0)**2
    
    
    # Two alternative (theta, n) settings; the second assignment overrides the first
    # (Estimate.watterson is assumed to compute Watterson's theta).
    theta = Estimate.watterson(np.tile(param['initHaps'], (10,1))); n = 2000
    theta = Estimate.watterson(param['initHaps']); n = 200
    a=[(y0 ,Z(X0[3], n, theta))]
    for t in range(5):
        a.append((y[t] ,Z(X[t,3,:], n, theta)))
    
    pd.DataFrame(a)
    
    times=range(param['startGeneration'],param['maxGeneration']+1,param['generationStep'])[1:]
    times
    x0_ = T.scalar("x0")
    n_ = T.scalar("n")
    theta_ = T.scalar("theta")
    times_ = T.ivector("times")
    S__=theano.shared(np.asarray(s, dtype = theano.config.floatX), 'S')
    # Deterministic per-generation selection update x' = x*(2 + s + s*x)/(2*(1 + s*x)), iterated to the last generation.
    predall_, updatesRecurrence_ = theano.scan(lambda x_prev, s: (s*x_prev*x_prev + s*x_prev + 2*x_prev)/(2*s*x_prev + 2), outputs_info=x0_, non_sequences=S__, n_steps=times_[-1])
    pred_ = Z(predall_[times_-1], n_, theta_)   # we only have targets at some generations, e.g. 10, 20, ...
    Feedforward_ = theano.function(inputs=[x0_,times_,n_,theta_], outputs=pred_, updates=updatesRecurrence_)
    Feedforward_(x0,times,n,theta)
    # Notebook-style inspection of intermediate values:
    x0
    y
    times
    Z(X[:,3,:],n,theta)
    theta,n
    z
    Z(z,2000,17)
    pd.DataFrame(np.append(X[:,3,:],z[:,None],axis=1))
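# The scan recurrence above is the standard deterministic update for additive selection; for small s it is
# closely approximated by the logistic closed form used earlier in this example. A quick NumPy check of
# that claim (x0 here is illustrative; sig/logit as sketched after Example #1):
def selectionUpdate(x, s):
    return x*(2. + s + s*x)/(2.*(1. + s*x))   # same expression as the scan lambda, rearranged

xSmall = 0.005; sSmall = 0.01; maxGen = 500
traj = []
x = xSmall
for t in range(maxGen):
    x = selectionUpdate(x, sSmall)
    traj.append(x)
traj = np.array(traj)
approx = sig(0.5*sSmall*np.arange(1, maxGen+1) + logit(xSmall))   # logistic approximation
print abs(traj - approx).max()                                    # small for small s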
Example #4
    # Expected statistic of the logistic trajectory starting at frequency nu0 (Z, sig, logit as before).
    def Nu0(s, t, nu0, n, theta): return Z(sig(np.array(t)*s/2 + logit(nu0)), n, theta)

    # Optimizer and experiment hyperparameters: momentum schedule, learning rate, iteration budget,
    # and the population-genetic settings (initial frequency, selection coefficient, theta, sample size).
    initNu0 = 0.005; final_momentum = 0.9; initial_momentum = 0.5; momentum_switchover = 5
    times = range(1, 801); S = 3; lr = 1e-2; maxIter = 10000
    initS = 0.05; initTheta = 20; numReplicates = 3; n = 2000
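    # The momentum hyperparameters above suggest the usual schedule in which the momentum is raised from
    # initial_momentum to final_momentum once the epoch count passes momentum_switchover. A minimal,
    # self-contained sketch of that update rule on a toy quadratic loss (the real optimizer lives in the
    # RNN module and is not shown here; treat this as an assumption about what the names imply):
    def toyGradient(sHat, sTrue=0.02):
        return 2.*(sHat - sTrue)                      # gradient of the toy loss (sHat - sTrue)**2

    sHat = initS; velocity = 0.0
    for epoch in range(maxIter):
        momentum = final_momentum if epoch > momentum_switchover else initial_momentum
        velocity = momentum*velocity - lr*toyGradient(sHat)   # momentum-smoothed gradient step
        sHat += velocity
    print sHat                                         # converges towards sTrue under this schedule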
Example #5
def Nu(s, t, nu0): return np.array([sig(t*s/2 + logit(nu)) for nu in nu0]).T
def Nu0(s, t, nu0, n, theta): return Z(sig(np.array(t)*s/2 + logit(nu0)), n, theta)
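# A small usage sketch: Nu evaluates the logistic trajectory for several initial frequencies at once and
# returns a (len(t) x len(nu0)) array, while Nu0 additionally maps the trajectory through Z. The numbers
# below are illustrative only (sig/logit as sketched after Example #1).
t = np.arange(1, 101)
nu0 = np.array([0.001, 0.01, 0.1])
trajectories = Nu(0.05, t, nu0)             # shape (100, 3): one logistic curve per initial frequency
pd.DataFrame(trajectories, index=t).plot()  # visual check of the three sweeps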