Example #1
def run(fquadrnts=None):
    quadrnts   = None
    n_qdrnts   = 1
    if fquadrnts is not None:
        if K == 4:
            quadrnts  = [[int(g_Ms[0]*fquadrnts[0][0])], [int(g_Ms[1]*fquadrnts[1][0])], [int(g_Ms[2]*fquadrnts[2][0])], [int(g_Ms[3]*fquadrnts[3][0])]]
            n_qdrnts = (len(quadrnts[0])+1) * (len(quadrnts[1])+1) * (len(quadrnts[2])+1) * (len(quadrnts[3])+1)
        elif K == 1:
            quadrnts  = [[int(g_Ms[0]*fquadrnts[0][0])]]
            n_qdrnts = (len(quadrnts[0])+1)

    run_os, run_es, run_vs, lftovr_os, lftovr_es = _Gu.get_obs_exp_v(g_Ms, expctd, chi2_boxes_mk, 6., quadrnts=quadrnts)

    fp = open("%(df)s/exp_obs%(qd)s.txt" % {"df" : outdirN, "qd" : n_qdrnts}, "w+")
    fp.write("#  quadrants %s\n" % str(fquadrnts))
    fp.write("#  ks D, pv\n")

    fp.write("%(d).3e  %(pv).3e -1\n" % {"d" : ks_D, "pv" : ks_pv})

    fp.write("#  chi2, pv, dg freedom  # quadrnt#, \n")
    for nq in xrange(n_qdrnts):
        reslt   = _ss.chisquare(run_os[nq], run_es[nq])
        fp.write("%(chi2).3e  %(pv).3e  %(n)d  #  nq %(nq)d\n" % {"chi2" : reslt[0], "pv" : reslt[1], "n" : len(run_es[nq]), "nq" : nq})
    fp.close()

    lftovr = _N.empty((n_qdrnts, 2))
    lftovr[:, 0] = lftovr_os
    lftovr[:, 1] = lftovr_es

    _U.savetxtWCom("%(df)s/lftovrs%(qd)d.txt" % {"df" : outdirN, "qd" : n_qdrnts}, lftovr, fmt="%d %.3f", delimiter=" ", com=("# observed, expeceted, quadrants %s" % str(fquadrnts)))
Example #2
bfn  = "u" if (mvPat == UNIF) else "n"
if (mvPat == NUNIF) and (Amx > 0):
    bfn = "b"

bfn += "n" if (mSM > 0) else "s"

iInd = 0
bFnd = False
while not bFnd:
    iInd += 1
    fn = "../DATA/%(bfn)s%(iI)d.dat" % {"bfn" : bfn, "iI" : iInd}
    fnocc="../DATA/%(bfn)s%(iI)docc.png" % {"bfn" : bfn, "iI" : iInd}

    if not os.access(fn, os.F_OK):  # file does not exist yet
        bFnd = True
        
dat = _N.zeros((NT, 5))
dat[:, 0] = pths
dat[sts, 1] = 1
dat[:, 2] = ctr
dat[:, 3] = sx
dat[:, 4] = l0

com = "#  f=%(mx).3f   q2=%(q2).4f   l0=%(l0).4f" % {"mx" : mx, "q2" : _N.mean(sx), "l0" : _N.mean(l0)}
_U.savetxtWCom("%s" % fn, dat, fmt="%.4f %d %.4f %.4f %.4f", delimiter=" ", com=com)
print "created %s" % fn

fig = _plt.figure()
_plt.hist(dat[:, 0], bins=_N.linspace(0, 3, 61), color="black")
_plt.savefig(fnocc)
_plt.close()
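
The bFnd loop above is a generic "first unused filename" probe: os.access(fn, os.F_OK) is True when a file exists, so the loop stops at the first index with no file on disk. The same pattern as a standalone helper (the path template is hypothetical):

import os

def first_free_fn(bfn, tmplt="../DATA/%(bfn)s%(iI)d.dat"):
    iInd = 0
    while True:
        iInd += 1
        fn = tmplt % {"bfn": bfn, "iI": iInd}
        if not os.access(fn, os.F_OK):  # stop at the first name not yet on disk
            return fn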
Example #3
def stochasticAssignment(oo, epc, it, Msc, M, K, l0, f, q2, u, Sg, _f_u, _u_u, _f_q2, _u_Sg, Asts, t0, mASr, xASr, rat, econt, gz, qdrMKS, freeClstr, hashthresh, cmp2Existing, nthrds=1):
    #  Msc   Msc signal clusters
    #  M     all clusters, including nz clstr.  M == Msc when not using nzclstr
    #  Gibbs sampling
    #  parameters l0, f, q2
    #  mASr, xASr   just the mark, position of spikes btwn t0 and t1
    #qdrMKS2 = _N.empty(qdrMKS.shape)
    t1 = _tm.time()
    nSpks = len(Asts)
    twpi = 2*_N.pi

    Kp1      = K+1
    #rat      = _N.zeros(M+1)
    pc       = _N.zeros(M)

    ur         = u.reshape((M, 1, K))
    fr         = f.reshape((M, 1))    # centers
    #print q2
    iq2        = 1./q2
    iSg        = _N.linalg.inv(Sg)
    iq2r       = iq2.reshape((M, 1))  
    try:
        ##  warnings because l0 is 0
        isN = _N.where(q2 <= 0)[0]
        if len(isN) > 0:
            q2[isN] = 0.3

        is0 = _N.where(l0 <= 0)[0]
        if len(is0) > 0:
            l0[is0] = 0.001

        pkFR       = _N.log(l0) - 0.5*_N.log(twpi*q2)   #  M
    except RuntimeWarning:
        print "WARNING"
        print l0
        print q2

    mkNrms = _N.log(1/_N.sqrt(twpi*_N.linalg.det(Sg)))
    mkNrms = mkNrms.reshape((M, 1))   #  M x 1

    rnds       = _N.random.rand(nSpks)

    pkFRr      = pkFR.reshape((M, 1))
    dmu        = (mASr - ur)     # mASr 1 x N x K,     ur  is M x 1 x K
    N          = mASr.shape[1]
    #t2 = _tm.time()
    #_N.einsum("mnj,mjk,mnk->mn", dmu, iSg, dmu, out=qdrMKS)
    #t3 = _tm.time()
    _fm.multi_qdrtcs_par_func(dmu, iSg, qdrMKS, M, N, K, nthrds=nthrds)

    #  fr is    M x 1, xASr is 1 x N, iq2r is M x 1
    #qdrSPC     = (fr - xASr)*(fr - xASr)*iq2r  #  M x nSpks   # 0.01s
    qdrSPC     = _N.empty((M, N))
    _hcb.hc_bcast1(fr, xASr, iq2r, qdrSPC, M, N)

    ###  how far is closest cluster to each newly observed mark

    #  mAS = mks[Asts+t0] 
    #  xAS = x[Asts + t0]   #  position @ spikes

    if cmp2Existing:   #  compare only non-hash spikes and non-hash clusters
        # realCl = _N.where(freeClstr == False)[0]
        # print freeClstr.shape
        # print realCl.shape

        abvthrEachCh = mASr[0] > hashthresh    #  N x K boolean
        abvthrAtLeast1Ch = _N.sum(abvthrEachCh, axis=1) > 0   #  length N
        newNonHashSpks   = _N.where(abvthrAtLeast1Ch)[0]

        newNonHashSpksMemClstr = _N.ones(len(newNonHashSpks), dtype=_N.int) * (M-1)   #  initially, assign all of them to noise cluster

        #print "spikes not hash"
         #print abvthrInds
        abvthrEachCh = u[0:Msc] > hashthresh  #  M x K  (M includes noise)
        abvthrAtLeast1Ch = _N.sum(abvthrEachCh, axis=1) > 0
        
        knownNonHclstrs  = _N.where(abvthrAtLeast1Ch & (freeClstr == False) & (q2[0:Msc] < wdSpc))[0]
        

        #print "clusters not hash"

        #  Place prior for freeClstr near new non-hash spikes that are far 
        #  from known clusters that are not hash clusters 


        nNrstMKS_d = _N.sqrt(_N.min(qdrMKS[knownNonHclstrs], axis=0)/K)  #  dim len(sts)
        nNrstSPC_d = _N.sqrt(_N.min(qdrSPC[knownNonHclstrs], axis=0))
        #  for each spike, distance to nearest non-hash cluster
        # print nNrstMKS_d
        # print nNrstSPC_d
        # print "=============="
        s = _N.empty((len(newNonHashSpks), 3))
        #  for each spike, distance to nearest cluster
        s[:, 0] = newNonHashSpks
        s[:, 1] = nNrstMKS_d[newNonHashSpks]
        s[:, 2] = nNrstSPC_d[newNonHashSpks]
        _N.savetxt(resFN("qdrMKSSPC%d" % epc, dir=oo.outdir), s, fmt="%d %.3e %.3e")

        dMK     = nNrstMKS_d[newNonHashSpks]
        dSP     = nNrstSPC_d[newNonHashSpks]

        ###  assignment into 

        farMKinds = _N.where(dMK > 4)[0]  #  4 std. deviations away in mark space
        #  mean of prior for center - mean of farMKinds
        #  cov  of prior for center - how certain am I of mean?  
        farSPinds = _N.where(dSP > 4)[0]  #  4 std. deviations away

        farMKSPinds = _N.union1d(farMKinds, farSPinds)
        print farMKinds
        print newNonHashSpks
        
        ##  points in newNonHashSpks but not in farMKinds
        notFarMKSPinds = _N.setdiff1d(_N.arange(newNonHashSpks.shape[0]), farMKSPinds)

        farMKSP = _N.empty((len(farMKSPinds), K+1))
        farMKSP[:, 0]  = xASr[0, newNonHashSpks[farMKSPinds]]
        farMKSP[:, 1:] = mASr[0, newNonHashSpks[farMKSPinds]]
        notFarMKSP = _N.empty((len(notFarMKSPinds), K+1))
        notFarMKSP[:, 0]  = xASr[0, newNonHashSpks[notFarMKSPinds]]
        notFarMKSP[:, 1:] = mASr[0, newNonHashSpks[notFarMKSPinds]]

        # farSP = _N.empty((len(farSPinds), K+1))
        # farMK = _N.empty((len(farMKinds), K+1))
        # farSP[:, 0]  = xASr[0, farSPinds]
        # farSP[:, 1:] = mASr[0, farSPinds]
        # farMK[:, 0]  = xASr[0, farMKinds]
        # farMK[:, 1:] = mASr[0, farMKinds]

        minK = 1
        maxK = farMKSPinds.shape[0] / K
        maxK = maxK if (maxK < 6) else 6

        freeClstrs = _N.where(freeClstr == True)[0]
        if maxK >= 2:
            print "coming in here"
            #labs, bics, bestLab, nClstrs = _oT.EMBICs(farMKSP, minK=minK, maxK=maxK, TR=1)
            labs, labsH, clstrs = emMKPOS_sep1B(farMKSP, None, TR=1, wfNClstrs=[[1, 4], [1, 4]], spNClstrs=[[1, 4], [1, 3]])
            nClstrs = clstrs[0]
            bestLab    = labs

            cls = clrs.get_colors(nClstrs)

            _U.savetxtWCom(resFN("newSpksMKSP%d" % epc, dir=oo.outdir), farMKSP, fmt="%.3e %.3e %.3e %.3e %.3e", com=("# number of clusters %d" % nClstrs))
            _U.savetxtWCom(resFN("newSpksMKSP_nf%d" % epc, dir=oo.outdir), notFarMKSP, fmt="%.3e %.3e %.3e %.3e %.3e", com=("# number of clusters %d" % nClstrs))

            L = len(freeClstrs)
            
            unqLabs = _N.unique(bestLab)

            upto    = nClstrs if nClstrs < L else L  #  this should just count large clusters
            ii  = -1
            fig = _plt.figure()
            
            for fid in unqLabs[0:upto]:
                iths = farMKSPinds[_N.where(bestLab == fid)[0]]
                ths = newNonHashSpks[iths]

                for w in xrange(K):
                    fig.add_subplot(2, 2, w+1)
                    _plt.scatter(xASr[0, ths], mASr[0, ths, w], color=cls[ii])

                if len(ths) > K:
                    ii += 1
                    im = freeClstrs[ii]   # Asts + t0 gives absolute time
                    newNonHashSpksMemClstr[iths] = im

                    _u_u[im]  = _N.mean(mASr[0, ths], axis=0)
                    u[im]     = _u_u[im]
                    _f_u[im]  = _N.mean(xASr[0, ths], axis=0)
                    f[im]     = _f_u[im]
                    q2[im]    = _N.std(xASr[0, ths], axis=0)**2 * 9
                    #  l0 = Hz * sqrt(2*_N.pi*q2)
                    l0[im]    =   10*_N.sqrt(q2[im])
                    _f_q2[im] = 1
                    _u_Sg[im] = _N.cov(mASr[0, ths], rowvar=0)*25
                    print "ep %(ep)d  new   cluster #  %(m)d" % {"ep" : epc, "m" : im}
                    print _u_u[im]
                    print _f_u[im]
                    print _f_q2[im]
                else:
                    print "too small    this prob. doesn't represent a cluster"

            _plt.savefig("newspks%d" % epc)


            # #######  known clusters
            # for fid in unqLabs[0:upto]:
            #     iths = farMKSPinds[_N.where(bestLab == fid)[0]]
            #     ths = newNonHashSpks[iths]

            #     for w in xrange(K):
            #         fig.add_subplot(2, 2, w+1)
            #         _plt.scatter(xASr[0, ths], mASr[0, ths, w], color=cls[ii])

            #     if len(ths) > K:
            #         ii += 1
            #         im = freeClstrs[ii]   # Asts + t0 gives absolute time
            #         newNonHashSpksMemClstr[iths] = im

            #         _u_u[im]  = _N.mean(mASr[0, ths], axis=0)
            #         u[im]     = _u_u[im]
            #         _f_u[im]  = _N.mean(xASr[0, ths], axis=0)
            #         f[im]     = _f_u[im]
            #         q2[im]    = _N.std(xASr[0, ths], axis=0)**2 * 9
            #         #  l0 = Hz * sqrt(2*_N.pi*q2)
            #         l0[im]    =   10*_N.sqrt(q2[im])
            #         _f_q2[im] = 1
            #         _u_Sg[im] = _N.cov(mASr[0, ths], rowvar=0)*25
            #         print "ep %(ep)d  new   cluster #  %(m)d" % {"ep" : epc, "m" : im}
            #         print _u_u[im]
            #         print _f_u[im]
            #         print _f_q2[im]
            #     else:
            #         print "too small    this prob. doesn't represent a cluster"

            # _plt.savefig("newspks%d" % epc)


        else:  #  just one cluster
            im = freeClstrs[0]   # Asts + t0 gives absolute time

            _u_u[im]  = _N.mean(mASr[0, newNonHashSpks[farMKSPinds]], axis=0)
            _f_u[im]  = _N.mean(xASr[0, newNonHashSpks[farMKSPinds]], axis=0)
            _u_Sg[im] = _N.cov(mASr[0, newNonHashSpks[farMKSPinds]], rowvar=0)*16
            _f_q2[im] = _N.std(xASr[0, newNonHashSpks[farMKSPinds]], axis=0)**2 * 16

        # ##  kernel density estimate
        # xs  = _N.linspace(-6, 6, 101)
        # xsr = xs.reshape(101, 1)
        # isg2= 1/(0.1**2)   #  spatial kernel bandwidth

        # # fig = _plt.figure(figsize=(6, 9))
        # # fig.add_subplot(1, 2, 1)
        # # _plt.scatter(xASr[0, newNonHashSpks[farMKinds]], mASr[0, newNonHashSpks[farMKinds], 0])
        # # fig.add_subplot(1, 2, 2)
        # # _plt.scatter(xASr[0, newNonHashSpks[farSPinds]], mASr[0, newNonHashSpks[farSPinds], 0])

        # freeClstrs = _N.where(freeClstr == True)[0]
        # L = len(freeClstrs)

        # jjj = 0
        # if (len(farSPinds) >= Kp1) and (len(farMKinds) >= Kp1):
        #     jjj = 1
        #     l1 = L/2

        #     for l in xrange(l1):  # mASr  is 1 x N x K
        #         im = freeClstrs[l]   # Asts + t0 gives absolute time
        #         _u_u[im]  = _N.mean(mASr[0, newNonHashSpks[farMKinds]], axis=0)
        #         y   = _N.exp(-0.5*(xsr - xASr[0, newNonHashSpks[farMKinds]])**2 * isg2)
        #         yc  = _N.sum(y, axis=1)
        #         ix  = _N.where(yc == _N.max(yc))[0][0]
        #         _f_u[im]  = xs[ix]
        #         _u_Sg[im] = _N.cov(mASr[0, newNonHashSpks[farMKinds]], rowvar=0)*30
        #         _f_q2[im] = _N.std(xASr[0, newNonHashSpks[farMKinds]], axis=0)**2 * 30
        #     # _plt.figure()
        #     # _plt.plot(xs, yc)

        #     for l in xrange(l1, L):
        #         im = freeClstrs[l]   # Asts + t0 gives absolute time
        #         _u_u[im]  = _N.mean(mASr[0, newNonHashSpks[farSPinds]], axis=0)
        #         y   = _N.exp(-0.5*(xsr - xASr[0, newNonHashSpks[farSPinds]])**2 * isg2)
        #         yc  = _N.sum(y, axis=1)
        #         ix  = _N.where(yc == _N.max(yc))[0][0]
        #         _f_u[im]  = xs[ix]
        #         _u_Sg[im] = _N.cov(mASr[0, newNonHashSpks[farSPinds]], rowvar=0)*30
        #         _f_q2[im] = _N.std(xASr[0, newNonHashSpks[farSPinds]], axis=0)**2 * 30
        #     # _plt.figure()
        #     # _plt.plot(xs, yc)

        # elif (len(farSPinds) >= Kp1) and (len(farMKinds) < Kp1):
        #     jjj = 2
        #     for l in xrange(L):
        #         im = freeClstrs[l]   # Asts + t0 gives absolute time
        #         _u_u[im]  = _N.mean(mASr[0, newNonHashSpks[farSPinds]], axis=0)
        #         y   = _N.exp(-0.5*(xsr - xASr[0, newNonHashSpks[farSPinds]])**2 * isg2)
        #         yc  = _N.sum(y, axis=1)
        #         ix  = _N.where(yc == _N.max(yc))[0][0]
        #         _f_u[im]  = xs[ix]
        #         _u_Sg[im] = _N.cov(mASr[0, newNonHashSpks[farSPinds]], rowvar=0)*30
        #         _f_q2[im] = _N.std(xASr[0, newNonHashSpks[farSPinds]], axis=0)**2 * 30
        #     # _plt.figure()
        #     # _plt.plot(xs, yc)

        # elif (len(farSPinds) < Kp1) and (len(farMKinds) >= Kp1):
        #     jjj = 3
        #     for l in xrange(L):
        #         im = freeClstrs[l]   # Asts + t0 gives absolute time
        #         _u_u[im]  = _N.mean(mASr[0, newNonHashSpks[farMKinds]], axis=0)
        #         y   = _N.exp(-0.5*(xsr - xASr[0, newNonHashSpks[farMKinds]])**2 * isg2)
        #         yc  = _N.sum(y, axis=1)
        #         ix  = _N.where(yc == _N.max(yc))[0][0]
        #         _f_u[im]  = xs[ix]
        #         _u_Sg[im] = _N.cov(mASr[0, newNonHashSpks[farMKinds]], rowvar=0)*30
        #         _f_q2[im] = _N.std(xASr[0, newNonHashSpks[farMKinds]], axis=0)**2 * 30
        #     # _plt.figure()
        #     # _plt.plot(xs, yc)

        """
        print "^^^^^^^^"
        print freeClstrs
        print "set priors for freeClstrs   %d" % jjj
        #print _u_u[freeClstrs]
        #print _u_Sg[freeClstrs]
        print _f_u[freeClstrs]
        print _f_q2[freeClstrs]
        """

        #if len(farSPinds) > 10:


        #  set the priors of the freeClusters to be near the far spikes


    ####  outside cmp2Existing here
    #   (Mx1) + (Mx1) - (MxN + MxN)
    #cont       = pkFRr + mkNrms - 0.5*(qdrSPC + qdrMKS)
    cont = _N.empty((M, N))
    _hcb.hc_qdr_sum(pkFRr, mkNrms, qdrSPC, qdrMKS, cont, M, N)

    mcontr     = _N.max(cont, axis=0).reshape((1, nSpks))  
    cont       -= mcontr
    _N.exp(cont, out=econt)

    for m in xrange(M):
        rat[m+1] = rat[m] + econt[m]

    rat /= rat[M]
    """
    # print f
    # print u
    # print q2
    # print Sg
    # print l0
    """

    # print rat

    M1 = rat[1:] >= rnds
    M2 = rat[0:-1] <= rnds

    gz[it] = (M1&M2).T

    if cmp2Existing:
        #  gz   is ITERS x N x Mwowonz   (N # of spikes in epoch)
        gz[it, newNonHashSpks] = False   #  not a member of any of them
        gz[it, newNonHashSpks, newNonHashSpksMemClstr] = True
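
The final assignment draw (the rat / rnds / M1 & M2 block) inverts a per-spike cumulative distribution built from the unnormalized membership weights econt. A compact numpy sketch of just that step, with hypothetical sizes:

import numpy as _N

M, N  = 3, 5                            # clusters x spikes (hypothetical)
econt = _N.random.rand(M, N)            # unnormalized p(cluster m | spike n)
rat   = _N.zeros((M + 1, N))
rat[1:] = _N.cumsum(econt, axis=0)      # same as rat[m+1] = rat[m] + econt[m]
rat  /= rat[M]                          # normalize so the last row is 1
rnds  = _N.random.rand(N)
gz    = ((rat[1:] >= rnds) & (rat[0:-1] <= rnds)).T  # N x M one-hot membership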
Example #4
def create(Lx,
           Hx,
           N,
           mvPat,
           RTs,
           frqmx,
           Amx,
           pT,
           l_sx_chpts,
           l_l0_chpts,
           l_ctr_chpts,
           mk_chpts,
           Covs,
           LoHis,
           km,
           bckgrdLam=None,
           script="no info",
           addShortStops=False,
           stops=10,
           stopDur=500,
           thresh=None):
    """
    km  tells me neuron N gives rise to clusters km[N]  (list)
    bckgrd is background spike rate  (Hz)
    """
    global UNIF, NUNIF
    #####  First check that the number of neurons and PFs all consistent.
    nNrnsSX = len(l_sx_chpts)
    nNrnsL0 = len(l_l0_chpts)
    nNrnsCT = len(l_ctr_chpts)
    nNrnsMK = len(mk_chpts)
    nNrnsMKA = LoHis.shape[0]
    nNrnsMKC = Covs.shape[0]

    if not (nNrnsSX == nNrnsL0 == nNrnsCT == nNrnsMK == nNrnsMKA == nNrnsMKC):
        print "Number of neurons not consistent"
        return None
    nNrns = nNrnsSX

    if not (LoHis.shape[1] == Covs.shape[1] == Covs.shape[2]):
        print "Covariance of LoHis not correct"
        return None
    K = LoHis.shape[1]

    PFsPerNrn = _N.zeros(nNrns, dtype=_N.int)

    sx_chpts = []
    l0_chpts = []
    ctr_chpts = []
    M = 0
    nrnNum = []
    for nrn in xrange(nNrns):
        #  # of place fields for neuron nrn
        nPFsSX = len(l_sx_chpts[nrn])
        nPFsL0 = len(l_l0_chpts[nrn])
        nPFsCT = len(l_ctr_chpts[nrn])
        sx_chpts.extend(l_sx_chpts[nrn])
        l0_chpts.extend(l_l0_chpts[nrn])
        ctr_chpts.extend(l_ctr_chpts[nrn])

        if not (nPFsSX == nPFsL0 == nPFsCT):
            print "Number of PFs for neuron %d not consistent" % nrn
            return None
        M += len(l_ctr_chpts[nrn])
        nrnNum += [nrn] * nPFsSX
        PFsPerNrn[nrn] = nPFsSX

    #  M = # of clusters  (in mark + pos space)
    #  nNrns = # of neurons
    ####  build data
    Ns = _N.empty(RTs, dtype=_N.int)
    if mvPat == NUNIF:

        for rt in xrange(RTs):
            Ns[rt] = N * ((1 - pT) + pT * _N.random.rand())
    else:
        Ns[:] = N

    NT = _N.sum(Ns)  #  total time we have data
    pths = _N.empty(NT)

    x01 = _N.linspace(0, 1, len(pths))
    x01 = x01.reshape((1, NT))
    plastic = False

    ##########  nonstationary center width
    #  sxt  should be (M x NT)
    sxt = _N.empty((M, NT))
    for m in xrange(M):  # sxts time scale
        sxt[m] = createSmoothedPath(sx_chpts[m], NT)
        if len(sx_chpts[m]) > 1: plastic = True

    sx = sxt**2  #  var of firing rate function

    ##########  nonstationary center height l0
    #  f is NT x M
    l0 = _N.empty((M, NT))
    for m in xrange(M):
        l0[m] = createSmoothedPath(l0_chpts[m], NT)
        if len(l0_chpts[m]) > 1: plastic = True

    f = l0 / _N.sqrt(2 * _N.pi * sx)  #  f*dt

    ##########  nonstationary center location
    ctr = _N.empty((M, NT))
    for m in xrange(M):
        ctr[m] = createSmoothedPath(ctr_chpts[m], NT)
        if len(ctr_chpts[m]) > 1: plastic = True

    if K > 0:
        ##########  nonstationary marks
        mk_MU = _N.empty((nNrns, NT, K))
        for n in xrange(nNrns):
            mk_MU[n] = createSmoothedPathK(mk_chpts[n], NT, K, LoHis[n])

            if len(mk_chpts[n]) > 1: plastic = True

    if mvPat == NUNIF:
        now = 0
        for rt in xrange(RTs):
            N = Ns[rt]  #  each traverse slightly different duration
            rp = _N.random.rand(N / 100)
            x = _N.linspace(Lx, Hx, N)
            xp = _N.linspace(Lx, Hx, N / 100)

            r = _N.interp(x, xp, rp)  #  creates a velocity vector
            #  create movement without regard for place field
            frqmxR = _N.abs(frqmx * (1 + 0.25 * _N.random.randn()))
            _N.linspace(0, 1, N, endpoint=False)  #  (result unused)
            rscld_t = _N.random.rand(N)  #  rscld_t
            rscld_t /= (_N.max(rscld_t) * 1.01)
            rscld_t.sort()
            phi0 = _N.random.rand() * 2 * _N.pi

            r += _N.exp(Amx * _N.sin(2 * _N.pi * rscld_t * frqmxR + phi0))
            pth = _N.zeros(N + 1)
            for n in xrange(1, N + 1):
                pth[n] = pth[n - 1] + r[n - 1]

            pth /= (pth[-1] - pth[0])
            pth *= (Hx - Lx)
            pth += Lx

            pths[now:now + N] = pth[0:N]
            now += N
    else:
        now = 0
        x = _N.linspace(Lx, Hx, N)
        for rt in xrange(RTs):
            N = Ns[rt]
            pths[now:now + N] = x
            now += N

    if addShortStops:
        for ist in xrange(stops):
            done = False
            while not done:
                t0 = int(_N.random.rand() * NT)
                t1 = t0 + int(stopDur * (1 + 0.1 * _N.random.randn()))
                if _N.abs(_N.max(_N.diff(pths[t0:t1]))) < 0.05 * (Hx - Lx):
                    done = True  #  not crossing origin

            pths[t0:t1] = _N.mean(pths[t0:t1])

    ###  now calculate firing rates
    dt = 0.001
    fdt = f * dt
    #  change place field location
    Lam = f * dt * _N.exp(-0.5 * (pths - ctr)**2 / sx)

    #_N.savetxt("lam", Lam.T)
    #_N.savetxt("pths", pths)

    rnds = _N.random.rand(M, NT)

    #dat = _N.zeros((NT, 2 + K))
    dat = _N.zeros((NT, 2 + K))
    dat[:, 0] = pths
    datNghbr = _N.zeros((NT, 2 + K))  ##  if spk in bin already exists, nxt door
    datNghbr[:, 0] = pths

    for m in xrange(M):
        sts = _N.where(rnds[m] < Lam[m])[0]  #  spikes from this neuron
        alrdyXst = _N.where(dat[:, 1] == 1)[0]  #  places where synchronous
        ndToMove = _N.intersect1d(alrdyXst, sts)
        dntMove = _N.setdiff1d(sts, ndToMove)  #  empty slots
        nonsynch = _N.empty(len(sts), dtype=_N.int)  #  integer bin indices
        datNghbr[dntMove, 1] = 1
        nonsynch[0:len(dntMove)] = dntMove
        print len(ndToMove)

        iStart = len(dntMove)  #  in nonsynch

        for iOcpd in ndToMove:  # occupied
            bDone = False
            while not bDone:
                iOcpd += 1
                if datNghbr[iOcpd, 1] == 0:
                    datNghbr[iOcpd, 1] = 1
                    nonsynch[iStart] = iOcpd
                    iStart += 1
                    bDone = True

        snonsynch = _N.sort(nonsynch)
        dat[sts, 1] = 1

        nrn = nrnNum[m]

        if K > 0:
            for t in xrange(len(sts)):
                obsMrk = _N.random.multivariate_normal(mk_MU[nrn, sts[t]],
                                                       Covs[nrn],
                                                       size=1)
                dat[sts[t], 2:] = obsMrk
                datNghbr[nonsynch[t], 2:] = obsMrk

        #  now noise spikes
        if bckgrdLam is not None:
            nzsts = _N.where(rnds[m] < (bckgrdLam * dt) / float(M))[0]
            dat[nzsts, 1] = 1
            nrn = nrnNum[m]
            if K > 0:
                for t in xrange(len(nzsts)):
                    dat[nzsts[t],
                        2:] = _N.random.multivariate_normal(mk_MU[nrn,
                                                                  nzsts[t]],
                                                            Covs[nrn],
                                                            size=1)

    if thresh is not None:
        sts = _N.where(dat[:, 1] == 1)[0]
        nID, nC = _N.where(dat[sts, 2:] < thresh)

        swtchs = _N.zeros((len(sts), K))
        swtchs[nID, nC] = 1  #  for all cells, all components below hash == 1

        swtchsK = _N.sum(swtchs, axis=1)

        blw_thrsh_all_chs = _N.where(swtchsK == K)[0]
        abv_thr = _N.setdiff1d(_N.arange(len(sts)), blw_thrsh_all_chs)
        print "below thresh in all channels  %(1)d / %(2)d" % {
            "1": len(blw_thrsh_all_chs),
            "2": len(sts)
        }
        dat[sts[blw_thrsh_all_chs], 1:] = 0

    bFnd = False

    ##  us un   uniform sampling of space, stationary or non-stationary place field
    ##  ns nn   non-uni sampling of space, stationary or non-stationary place field
    ##  bs bb   biased and non-uni sampling of space

    bfn = "" if (M == 1) else ("%d" % M)

    if mvPat == UNIF:
        bfn += "u"
    else:
        bfn += "b" if (Amx > 0) else "n"

    bfn += "n" if plastic else "s"

    iInd = 0
    while not bFnd:
        iInd += 1
        dd = os.getenv("__EnDeDataDir__")
        fn = "%(dd)s/%(bfn)s%(iI)d.dat" % {"bfn": bfn, "iI": iInd, "dd": dd}
        fnocc = "%(dd)s/%(bfn)s%(iI)docc.png" % {
            "bfn": bfn,
            "iI": iInd,
            "dd": dd
        }
        fnprm = "%(dd)s/%(bfn)s%(iI)d_prms.pkl" % {
            "bfn": bfn,
            "iI": iInd,
            "dd": dd
        }

        if not os.access(fn, os.F_OK):  # file does not exist yet
            bFnd = True

    smk = " %.4f" * K
    _U.savetxtWCom("%s" % fn,
                   dat,
                   fmt=("%.4f %d" + smk),
                   delimiter=" ",
                   com="#  script=%s.py" % script)
    _U.savetxtWCom("%s_NS.dat" % fn[:-4],
                   datNghbr,
                   fmt=("%.4f %d" + smk),
                   delimiter=" ",
                   com="#  script=%s.py" % script)

    pcklme = {}

    pcklme["l0"] = l0[:, ::100]
    pcklme["f"] = ctr[:, ::100]
    pcklme["sq2"] = sx[:, ::100]
    pcklme["u"] = mk_MU[:, ::100]
    pcklme["covs"] = Covs
    pcklme["intv"] = 100
    pcklme["km"] = km

    dmp = open(fnprm, "wb")
    pickle.dump(pcklme, dmp, -1)
    dmp.close()

    print "created %s" % fn

    fig = _plt.figure()
    _plt.hist(dat[:, 0], bins=_N.linspace(Lx, Hx, 101), color="black")
    _plt.savefig(fnocc)
    _plt.close()
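
This variant differs from Example #5 mainly through datNghbr: when a spike lands in a 1 ms bin that already holds one, it is slid to the next empty bin so each bin carries at most one spike. The probe in the ndToMove loop boils down to the following sketch (hypothetical indices; the boolean array stands in for datNghbr[:, 1]):

import numpy as _N

occupied = _N.zeros(20, dtype=bool)
occupied[[3, 7]] = True        # bins that already hold a spike
for s in [3, 10]:              # new spikes; index 3 collides
    while occupied[s]:         # slide right until a free bin is found
        s += 1
    occupied[s] = True         # the colliding spike ends up in bin 4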
Example #5
def create(Lx, Hx, N, mvPat, RTs, frqmx, Amx, pT, l_sx_chpts, l_l0_chpts, l_ctr_chpts, mk_chpts, Covs, LoHis, km, bckgrdLam=None, script="no info", addShortStops=False, stops=10, stopDur=500, thresh=None):
    """
    km  tells me neuron N gives rise to clusters km[N]  (list)
    bckgrd is background spike rate  (Hz)
    """
    global UNIF, NUNIF
    #####  First check that the number of neurons and PFs all consistent.
    nNrnsSX = len(l_sx_chpts)
    nNrnsL0 = len(l_l0_chpts)
    nNrnsCT = len(l_ctr_chpts)
    nNrnsMK = len(mk_chpts)
    nNrnsMKA= LoHis.shape[0]
    nNrnsMKC= Covs.shape[0]

    if not (nNrnsSX == nNrnsL0 == nNrnsCT == nNrnsMK == nNrnsMKA == nNrnsMKC):
        print "Number of neurons not consistent"
        return None
    nNrns = nNrnsSX

    if not (LoHis.shape[1] == Covs.shape[1] == Covs.shape[2]):
        print "Covariance of LoHis not correct"
        return None
    K = LoHis.shape[1]
    
    PFsPerNrn = _N.zeros(nNrns, dtype=_N.int)

    sx_chpts  = []
    l0_chpts  = []
    ctr_chpts = []
    M         = 0
    nrnNum    = []
    for nrn in xrange(nNrns):
        #  # of place fields for neuron nrn
        nPFsSX = len(l_sx_chpts[nrn])
        nPFsL0 = len(l_l0_chpts[nrn])
        nPFsCT = len(l_ctr_chpts[nrn])
        sx_chpts.extend(l_sx_chpts[nrn])
        l0_chpts.extend(l_l0_chpts[nrn])
        ctr_chpts.extend(l_ctr_chpts[nrn])

        if not (nPFsSX == nPFsL0 == nPFsCT):
            print "Number of PFs for neuron %d not consistent" % nrn
            return None
        M += len(l_ctr_chpts[nrn])
        nrnNum += [nrn]*nPFsSX
        PFsPerNrn[nrn] = nPFsSX

    #  M = # of clusters  (in mark + pos space)  
    #  nNrns = # of neurons
    ####  build data
    Ns     = _N.empty(RTs, dtype=_N.int)
    if mvPat == NUNIF:

        for rt in xrange(RTs):
            Ns[rt] = N*((1-pT) + pT*_N.random.rand())
    else:
        Ns[:] = N

    NT     = _N.sum(Ns)     #  total time we have data
    pths    = _N.empty(NT)

    x01    = _N.linspace(0, 1, len(pths))
    x01    = x01.reshape((1, NT))
    plastic = False

    ##########  nonstationary center width
    #  sxt  should be (M x NT)
    sxt   = _N.empty((M, NT))
    for m in xrange(M):  # sxts time scale
        sxt[m] = createSmoothedPath(sx_chpts[m], NT)
        if len(sx_chpts[m]) > 1:  plastic = True

    sx    = sxt**2     #  var of firing rate function

    ##########  nonstationary center height l0
    #  f is NT x M
    l0   = _N.empty((M, NT))
    for m in xrange(M):
        l0[m] = createSmoothedPath(l0_chpts[m], NT)
        if len(l0_chpts[m]) > 1:  plastic = True

    f     = l0/_N.sqrt(2*_N.pi*sx)   #  f*dt

    ##########  nonstationary center location
    ctr  = _N.empty((M, NT))
    for m in xrange(M):
        ctr[m] = createSmoothedPath(ctr_chpts[m], NT)
        if len(ctr_chpts[m]) > 1:  plastic = True

    if K > 0:
        ##########  nonstationary marks
        mk_MU  = _N.empty((nNrns, NT, K))
        for n in xrange(nNrns):
            mk_MU[n] = createSmoothedPathK(mk_chpts[n], NT, K, LoHis[n])

            if len(mk_chpts[n]) > 1:  plastic = True

    if mvPat == NUNIF:
        now = 0
        for rt in xrange(RTs):
            N = Ns[rt]    #  each traverse slightly different duration
            rp  = _N.random.rand(N/100)
            x     = _N.linspace(Lx, Hx, N)
            xp     = _N.linspace(Lx, Hx, N/100)

            r   = _N.interp(x, xp, rp)       #  creates a velocity vector
            #  create movement without regard for place field
            r += Amx*(1.1+_N.sin(2*_N.pi*_N.linspace(0, 1, N, endpoint=False)*frqmx))
            pth = _N.zeros(N+1)
            for n in xrange(1, N+1):
                pth[n] = pth[n-1] + r[n-1]

            pth   /= (pth[-1] - pth[0])
            pth   *= (Hx-Lx)
            pth   += Lx

            pths[now:now+N]     = pth[0:N]
            now += N
    else:
        now = 0
        x = _N.linspace(Lx, Hx, N)
        for rt in xrange(RTs):
            N = Ns[rt]
            pths[now:now+N]     = x
            now += N

    if addShortStops:
        for ist in xrange(stops):
            done   = False
            while not done:
                t0 = int(_N.random.rand()*NT)
                t1 = t0 + int(stopDur*(1+0.1*_N.random.randn()))
                if _N.abs(_N.max(_N.diff(pths[t0:t1]))) < 0.05*(Hx-Lx):
                    done = True   #  not crossing origin
                
            pths[t0:t1] = _N.mean(pths[t0:t1])

    ###  now calculate firing rates
    dt   = 0.001
    fdt  = f*dt
    #  change place field location
    Lam   = f*dt*_N.exp(-0.5*(pths-ctr)**2 / sx)

    rnds = _N.random.rand(M, NT)

    #dat = _N.zeros((NT, 2 + K))
    dat = _N.zeros((NT, 2 + K))
    dat[:, 0] = pths

    for m in xrange(M):
        sts  = _N.where(rnds[m] < Lam[m])[0]
        dat[sts, 1] = 1

        nrn = nrnNum[m]
        if K > 0:
            for t in xrange(len(sts)):
                dat[sts[t], 2:] = _N.random.multivariate_normal(mk_MU[nrn, sts[t]], Covs[nrn], size=1)

        #  now noise spikes
        if bckgrdLam is not None:
            nzsts  = _N.where(rnds[m] < (bckgrdLam*dt)/float(M))[0]
            dat[nzsts, 1] = 1
            nrn = nrnNum[m]
            if K > 0:
                for t in xrange(len(nzsts)):
                    dat[nzsts[t], 2:] = _N.random.multivariate_normal(mk_MU[nrn, nzsts[t]], Covs[nrn], size=1)

    if thresh is not None:
        sts = _N.where(dat[:, 1] == 1)[0]
        nID, nC = _N.where(dat[sts, 2:] < thresh)

        swtchs  = _N.zeros((len(sts), K))
        swtchs[nID, nC] = 1    #  for all cells, all components below hash == 1

        swtchsK = _N.sum(swtchs, axis=1)
        
        blw_thrsh_all_chs = _N.where(swtchsK == K)[0]
        abv_thr = _N.setdiff1d(_N.arange(len(sts)), blw_thrsh_all_chs)
        print "below thresh in all channels  %(1)d / %(2)d" % {"1" : len(blw_thrsh_all_chs), "2" : len(sts)}
        dat[sts[blw_thrsh_all_chs], 1:] = 0

    bFnd  = False

    ##  us un   uniform sampling of space, stationary or non-stationary place field
    ##  ns nn   non-uni sampling of space, stationary or non-stationary place field
    ##  bs bb   biased and non-uni sampling of space

    bfn     = "" if (M == 1) else ("%d" % M)

    if mvPat == UNIF:
        bfn += "u"
    else:
        bfn += "b" if (Amx > 0) else "n"

    bfn += "n" if plastic else "s"

    iInd = 0
    while not bFnd:
        iInd += 1
        dd   = os.getenv("__EnDeDataDir__")
        fn = "%(dd)s/%(bfn)s%(iI)d.dat" % {"bfn" : bfn, "iI" : iInd, "dd" : dd}
        fnocc="%(dd)s/%(bfn)s%(iI)docc.png" % {"bfn" : bfn, "iI" : iInd, "dd" : dd}
        fnprm = "%(dd)s/%(bfn)s%(iI)d_prms.pkl" % {"bfn" : bfn, "iI" : iInd, "dd" : dd}

        if not os.access(fn, os.F_OK):  # file does not exist yet
            bFnd = True

    smk = " %.4f" * K
    _U.savetxtWCom("%s" % fn, dat, fmt=("%.4f %d" + smk), delimiter=" ", com="#  script=%s.py" % script)

    pcklme = {}

    pcklme["l0"]  = l0[:, ::100]
    pcklme["f"]   = ctr[:, ::100]
    pcklme["sq2"] = sx[:, ::100]
    pcklme["u"]   = mk_MU[:, ::100]
    pcklme["covs"]= Covs
    pcklme["intv"]= 100
    pcklme["km"]  = km

    dmp = open(fnprm, "wb")
    pickle.dump(pcklme, dmp, -1)
    dmp.close()

    print "created %s" % fn

    fig = _plt.figure()
    _plt.hist(dat[:, 0], bins=_N.linspace(Lx, Hx, 101), color="black")
    _plt.savefig(fnocc)
    _plt.close()
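
The spike generation in both create() variants is per-bin Bernoulli thinning of an inhomogeneous Poisson rate: with bin width dt, the bin at time t fires with probability Lam(t) = f*dt*exp(-(x(t)-ctr)^2/(2*sx)), valid while Lam << 1. A minimal sketch with a hypothetical Gaussian place field (numpy as _N, as in the examples):

import numpy as _N

dt   = 0.001                                         # 1 ms bins
pths = _N.linspace(0., 3., 10000)                    # position over time
f, ctr, sx = 15., 1.5, 0.1**2                        # peak rate (Hz), center, width^2
Lam  = f * dt * _N.exp(-0.5 * (pths - ctr)**2 / sx)  # P(spike in each bin)
sts  = _N.where(_N.random.rand(len(pths)) < Lam)[0]  # spike bin indices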
Example #6
def create(Lx,
           Hx,
           N,
           mvPat,
           RTs,
           frqmx,
           Amx,
           pT,
           l_sx_chpts,
           l_l0_chpts,
           l_ctr_chpts,
           mk_chpts,
           Covs,
           LoHis,
           km,
           bckgrdLam=None,
           script="no info",
           addShortStops=False,
           stops=10,
           stopDur=500,
           thresh=None,
           x_mvt=None,
           nz_mvt=None,
           spc_dim=1,
           segs=None):
    """
    km  tells me neuron N gives rise to clusters km[N]  (list)
    bckgrd is background spike rate  (Hz)
    """
    global UNIF, NUNIF
    #####  First check that the number of neurons and PFs all consistent.
    nNrnsSX = len(l_sx_chpts)
    nNrnsL0 = len(l_l0_chpts)
    nNrnsCT = len(l_ctr_chpts)
    nNrnsMK = len(mk_chpts)
    nNrnsMKA = LoHis.shape[0]
    nNrnsMKC = Covs.shape[0]

    if not (nNrnsSX == nNrnsL0 == nNrnsCT == nNrnsMK == nNrnsMKA == nNrnsMKC):
        print("Number of neurons not consistent")
        return None
    nNrns = nNrnsSX

    if not (LoHis.shape[1] == Covs.shape[1] == Covs.shape[2]):
        print("Covariance of LoHis not correct")
        return None
    K = LoHis.shape[1]

    PFsPerNrn = _N.zeros(nNrns, dtype=_N.int)

    sx_chpts = []
    l0_chpts = []
    ctr_chpts = []
    M = 0  #  of place fields total.  a neuron may have > 1 PFs.
    nrnNum = []
    for nrn in range(nNrns):
        #  # of place fields for neuron nrn
        nPFsSX = len(l_sx_chpts[nrn])
        nPFsL0 = len(l_l0_chpts[nrn])
        nPFsCT = len(l_ctr_chpts[nrn])
        sx_chpts.extend(l_sx_chpts[nrn])
        l0_chpts.extend(l_l0_chpts[nrn])
        ctr_chpts.extend(l_ctr_chpts[nrn])

        if not (nPFsSX == nPFsL0 == nPFsCT):
            print("Number of PFs for neuron %d not consistent" % nrn)
            return None
        M += len(l_ctr_chpts[nrn])
        nrnNum += [nrn] * nPFsSX
        PFsPerNrn[nrn] = nPFsSX

    #  M = # of clusters  (in mark + pos space)
    #  nNrns = # of neurons

    if x_mvt is None:
        ####  build data
        Ns = _N.empty(RTs, dtype=_N.int)
        if mvPat == NUNIF:
            for rt in range(RTs):
                Ns[rt] = N * ((1 - pT) + pT * _N.random.rand())
        else:
            Ns[:] = N

        NT = _N.sum(Ns)  #  total time we have data
        pths = _N.empty(NT)
    else:
        NT = x_mvt.shape[0]  #  total time we have data

    plastic = False

    ##########  nonstationary center width
    #  sx_chpts is flattened version of l_sx_chpts, which is a list per neuron
    #  of place field change points.  neur#1 has 1 pf, neur#2 has 5 pfs = 6 pfs
    #  sxt  should be (M x NT)
    sx = _N.empty((M, NT)) if spc_dim == 1 else _N.empty((M, NT, 2, 2))
    isx = _N.empty((M, NT)) if spc_dim == 1 else _N.empty((M, NT, 2, 2))

    for m in range(M):  # sxts time scale
        if spc_dim == 1:
            sx[m] = createSmoothedPath(
                sx_chpts[m],
                NT)**2  #  sx_chpts[m] is an ndaray.  sx_chpts is a list
            if len(sx_chpts[m]) > 1: plastic = True
            isx[m] = 1. / sx[m]
        else:
            sx[m, :, 0,
               0] = createSmoothedPath(sx_chpts[m][:, _N.array([0, 1])], NT)**2
            sx[m, :, 1,
               0] = createSmoothedPath(sx_chpts[m][:, _N.array([0, 2])], NT)**2
            sx[m, :, 0, 1] = sx[m, :, 1, 0]
            sx[m, :, 1,
               1] = createSmoothedPath(sx_chpts[m][:, _N.array([0, 3])], NT)**2
            isx[m] = _N.linalg.inv(sx[m])

    ##########  nonstationary center height l0
    #  f is NT x M
    l0 = _N.empty((M, NT))
    for m in range(M):
        l0[m] = createSmoothedPath(l0_chpts[m], NT)
        if len(l0_chpts[m]) > 1: plastic = True

    f = l0 / _N.sqrt(2 * _N.pi * sx) if spc_dim == 1 else l0 / _N.sqrt(
        (2 * _N.pi) * (2 * _N.pi) * _N.linalg.det(sx))

    ##########  nonstationary center location
    #  ctr_chpts is the flattened version of l_ctr_chpts, a list per neuron
    #  of place-field center change points.
    #  ctr  should be (M x NT)
    ctr = _N.empty((M, NT)) if spc_dim == 1 else _N.empty((M, NT, 2))

    for m in range(M):  # sxts time scale
        if spc_dim == 1:
            ctr[m] = createSmoothedPath(
                ctr_chpts[m],
                NT)  #  sx_chpts[m] is an ndaray.  sx_chpts is a list
            if len(ctr_chpts[m]) > 1: plastic = True
        else:
            ctr[m, :,
                0] = createSmoothedPath(ctr_chpts[m][:, _N.array([0, 1])], NT)
            ctr[m, :,
                1] = createSmoothedPath(ctr_chpts[m][:, _N.array([0, 2])], NT)

    if K > 0:
        ##########  nonstationary marks
        mk_MU = _N.empty((nNrns, NT, K))
        print("-------  ")

        for n in range(nNrns):
            mk_MU[n] = createSmoothedPathK(mk_chpts[n], NT, K, LoHis[n])

            if len(mk_chpts[n]) > 1: plastic = True

    if x_mvt is None:
        if mvPat == NUNIF:
            now = 0
            for rt in range(RTs):
                N = Ns[rt]  #  each traverse slightly different duration
                rp = _N.random.rand(N // 100)
                x = _N.linspace(Lx, Hx, N)
                xp = _N.linspace(Lx, Hx, N // 100)

                r = _N.interp(x, xp, rp)  #  creates a velocity vector
                #  create movement without regard for place field
                frqmxR = _N.abs(frqmx * (1 + 0.25 * _N.random.randn()))
                _N.linspace(0, 1, N, endpoint=False)  #  (result unused)
                rscld_t = _N.random.rand(N)  #  rscld_t
                rscld_t /= (_N.max(rscld_t) * 1.01)
                rscld_t.sort()
                phi0 = _N.random.rand() * 2 * _N.pi

                r += _N.exp(Amx * _N.sin(2 * _N.pi * rscld_t * frqmxR + phi0))
                pth = _N.zeros(N + 1)
                for n in range(1, N + 1):
                    pth[n] = pth[n - 1] + r[n - 1]

                pth /= (pth[-1] - pth[0])
                pth *= (Hx - Lx)
                pth += Lx

                pths[now:now + N] = pth[0:N]
                now += N
        else:  #  mvPat == UNIF
            now = 0
            x = _N.linspace(Lx, Hx, N)
            for rt in range(RTs):
                N = Ns[rt]
                pths[now:now + N] = x
                now += N

        if addShortStops:
            for ist in range(stops):
                done = False
                while not done:
                    t0 = int(_N.random.rand() * NT)
                    t1 = t0 + int(stopDur * (1 + 0.1 * _N.random.randn()))
                    if _N.abs(_N.max(_N.diff(pths[t0:t1]))) < 0.05 * (Hx - Lx):
                        done = True  #  not crossing origin

                pths[t0:t1] = _N.mean(pths[t0:t1])
    else:
        pths = x_mvt

    ###  now calculate firing rates
    dt = 0.001
    fdt = f * dt
    #  change place field location
    if spc_dim == 1:
        Lam = f * dt * _N.exp(-0.5 * (pths - ctr)**2 / sx)
    else:  #  spc_dim == 2
        Lam = _N.empty((M, NT))
        for m in range(M):
            dif = pths - ctr[m]
            Lam[m] = f[m] * dt * _N.exp(
                -0.5 * _N.einsum("ni,nij,nj->n", dif, isx[m], dif))
    _N.savetxt("lam", Lam.T)
    _N.savetxt("pths", pths)

    rnds = _N.random.rand(M, NT)

    if spc_dim == 1:
        dat = _N.zeros((NT, 2 + K + 1))
        dat[:, 0] = pths
        if segs is not None:
            dat[:, 2 + K] = segs
        else:
            dat[:, 2 + K] = 1
    else:
        dat = _N.zeros((NT, 3 + K + 2))
        if nz_mvt is not None:
            dat[:, 0:2] = nz_mvt
        else:
            dat[:, 0:2] = pths
        dat[:, 7:9] = segs

    spkind = 1 if spc_dim == 1 else 2
    mrk_frm = 2 if spc_dim == 1 else 3
    for m in range(M):
        sts = _N.where(rnds[m] < Lam[m])[0]  #  spikes from this neuron
        print("neuron %(m)d   %(s)d spks" % {"m": m, "s": len(sts)})
        alrdyXst = _N.where(dat[:, spkind] == 1)[0]  #  places where synchronous
        ndToMove = _N.intersect1d(alrdyXst, sts)
        dntMove = _N.setdiff1d(sts, ndToMove)  #  empty slots
        nonsynch = _N.empty(len(sts))
        nonsynch[0:len(dntMove)] = dntMove
        print(len(ndToMove))

        iStart = len(dntMove)  #  in nonsynch

        # for iOcpd in ndToMove:  # occupied
        #     bDone = False
        #     while not bDone:
        #         iOcpd += 1
        #         if datNghbr[iOcpd, 1] == 0:
        #             datNghbr[iOcpd, 1] = 1
        #             nonsynch[iStart]   = iOcpd
        #             iStart += 1
        #             bDone = True

        snonsynch = _N.sort(nonsynch)
        dat[sts, spkind] = 1

        nrn = nrnNum[m]

        if K > 0:
            for t in range(len(sts)):
                obsMrk = _N.random.multivariate_normal(mk_MU[nrn, sts[t]],
                                                       Covs[nrn],
                                                       size=1)
                dat[sts[t], mrk_frm:mrk_frm + K] = obsMrk

        #  now noise spikes
        if bckgrdLam is not None:
            nzsts = _N.where(rnds[m] < (bckgrdLam * dt) / float(M))[0]
            dat[nzsts, spkind] = 1
            nrn = nrnNum[m]
            if K > 0:
                for t in range(len(nzsts)):
                    dat[nzsts[t], mrk_frm:mrk_frm +
                        K] = _N.random.multivariate_normal(mk_MU[nrn,
                                                                 nzsts[t]],
                                                           Covs[nrn],
                                                           size=1)

    if thresh is not None:
        sts = _N.where(dat[:, spkind] == 1)[0]
        nID, nC = _N.where(dat[sts, mrk_frm:mrk_frm + K] < thresh)

        swtchs = _N.zeros((len(sts), K))
        swtchs[nID, nC] = 1  #  for all cells, all components below hash == 1

        swtchsK = _N.sum(swtchs, axis=1)

        blw_thrsh_all_chs = _N.where(swtchsK == K)[0]
        abv_thr = _N.setdiff1d(_N.arange(len(sts)), blw_thrsh_all_chs)
        print("below thresh in all channels  %(1)d / %(2)d" % {
            "1": len(blw_thrsh_all_chs),
            "2": len(sts)
        })
        dat[sts[blw_thrsh_all_chs], spkind] = 0

    bFnd = False

    ##  us un   uniform sampling of space, stationary or non-stationary place field
    ##  ns nn   non-uni sampling of space, stationary or non-stationary place field
    ##  bs bb   biased and non-uni sampling of space

    bfn = "" if (M == 1) else ("%d" % M)

    if mvPat == UNIF:
        bfn += "u"
    else:
        bfn += "b" if (Amx > 0) else "n"

    bfn += "n" if plastic else "s"

    iInd = 0
    while not bFnd:
        iInd += 1
        dd = os.getenv("__JIFIDataDir__")
        fn = "%(dd)s/%(bfn)s%(iI)d.dat" % {"bfn": bfn, "iI": iInd, "dd": dd}
        fnocc = "%(dd)s/%(bfn)s%(iI)docc.png" % {
            "bfn": bfn,
            "iI": iInd,
            "dd": dd
        }
        fnprm = "%(dd)s/%(bfn)s%(iI)d_prms.pkl" % {
            "bfn": bfn,
            "iI": iInd,
            "dd": dd
        }

        if not os.access(fn, os.F_OK):  # file does not exist yet
            bFnd = True

    smk = " %.2f" * K

    if spc_dim == 1:
        smk += " %d"
        _U.savetxtWCom("%s" % fn,
                       dat,
                       fmt=("%.4f %d" + smk),
                       delimiter=" ",
                       com="#  script=%s.py" % script)
    else:
        smk += " %d %d"
        _U.savetxtWCom("%s" % fn,
                       dat,
                       fmt=("%.2f %.2f %d" + smk),
                       delimiter=" ",
                       com="#  script=%s.py" % script)

    pcklme = {}

    pcklme["l0"] = l0[:, ::100]
    pcklme["u"] = mk_MU[:, ::100]
    pcklme["covs"] = Covs
    pcklme["intv"] = 100
    pcklme["km"] = km

    pcklme["f"] = ctr[:, ::100]
    pcklme["sq2"] = sx[:, ::100]

    dmp = open(fnprm, "wb")
    pickle.dump(pcklme, dmp, -1)
    dmp.close()

    print("created %s" % fn)
Example #7
def done():
    """
    come here after 6 landmarks chosen
    """

    global r, seg_ts, segs, Nsgs, inout, a_inout, lindist, lin_lr, lin_inout, lin_lr_inout, lr, raw_lindist
    global scxMin, scxMax, scyMin, scyMax
    global an, day, ep

    hdir = _N.empty(2)
    vdir = _N.empty(2)
    linp = _N.empty(2)
    """
    L5       L0       L3
    ||       ||       ||
    ||       ||       ||
    5        1        3
    ||       ||       ||
    ||       ||       ||
    L4===4===L1===2===L2
    """
    scxMin, scxMax, scyMin, scyMax = get_boundaries(r)
    segs_from_landmarks(segs, landmarks, length)
    e = inout_dir(segs, Nsgs)
    a_s, b_s, c_s = slopes_of_segs(segs)

    for i in xrange(5):   #  draw the five maze segments
        _plt.plot([segs[i, 0, 0], segs[i, 1, 0]], [segs[i, 0, 1], segs[i, 1, 1]],
                  lw=3,
                  color="black")

    segsr = segs.reshape((10, 2))

    clrs = ["blue", "orange", "red", "green", "yellow", "black", "brown"]
    fillin_unobsvd(r)

    N = r.shape[0]
    seg_ts = _N.empty(N, dtype=_N.int)
    lindist = _N.empty(N)
    lin_lr = _N.empty(N)
    lin_inout = _N.empty(N)
    lin_lr_inout = _N.empty(N)
    lr = _N.ones(N, dtype=_N.int) * -3

    inout = _N.empty(N, dtype=_N.int)
    a_inout = _N.empty(N)
    gk = gauKer(30)
    gk /= _N.sum(gk)
    fx = _N.convolve(0.5 * (r[:, 1] + r[:, 3]), gk, mode="same")
    fy = _N.convolve(0.5 * (r[:, 2] + r[:, 4]), gk, mode="same")
    xp = fx
    yp = fy
    xpyp = _N.empty((N, 2))
    xpyp[:, 0] = xp
    xpyp[:, 1] = yp

    _xpyp = _N.repeat(xpyp, Nsgs * 2, axis=0)
    rxpyp = _xpyp.reshape((N, Nsgs * 2, 2))

    dv = segsr - rxpyp
    dists = _N.sum(dv * dv, axis=2)  # closest point on maze from field points
    rdists = dists.reshape((N, Nsgs, 2))
    print rdists.shape

    online = _N.empty(Nsgs, dtype=bool)
    mins = _N.empty(Nsgs)

    for n in xrange(N):
        x0 = xpyp[n, 0]
        y0 = xpyp[n, 1]
        #  xcs, ycs: points on each segment's line closest to (x0, y0) (may be beyond endpoints)
        xcs = (b_s *
               (b_s * x0 - a_s * y0) - a_s * c_s) / (a_s * a_s + b_s * b_s)
        ycs = (-a_s *
               (b_s * x0 - a_s * y0) - b_s * c_s) / (a_s * a_s + b_s * b_s)

        find_clsest(n, x0, y0, segs, rdists, seg_ts, Nsgs, online, offset, xcs,
                    ycs, mins, linp)

    # fig = _plt.figure()
    # _plt.plot(seg_ts)
    # clean_seg_ts(seg_ts)

    # _plt.plot(seg_ts)

    raw_lindist = _N.zeros(N)
    lindist_x0y0(N, xpyp, segs, rdists, seg_ts, Nsgs, online, offset, a_s, b_s,
                 c_s, mins, linp, raw_lindist)

    smooth_lindist(raw_lindist, lindist)

    # fig = _plt.figure(figsize=(10, 4))
    # _plt.plot(lindist)
    # gk = gauKer(8)   #  don't want to make this too large.  if we just pass  through the choice point, we can miss it.
    # gk /= _N.sum(gk)
    # flindist = _N.convolve(lindist, gk, mode="same")
    # lindist  = flindist

    # rm_lindist_jumps(N, lindist, seg_ts)
    fig = _plt.figure(figsize=(10, 4))
    _plt.plot(lindist)

    spd_thr = 0.35
    a_inout_x0y0(N, a_inout, inout, r, seg_ts, spd_thr, e)
    #_plt.plot([x0, x0], [y0, y0], ms=10, marker=".", color=clr)

    make_lin_inout(N, lindist, inout, lin_inout)
    make_lin_lr(N, lr, lindist, seg_ts, r)
    build_lin_lr_inout(N, lin_lr_inout, lindist, lr, inout, gkRWD)

    #  inout
    cp_lr, cp_inout = cpify_LR_inout(lr, inout)

    sday = ("0%d" % day) if (day < 10) else ("%d" % day)
    fn = _edd.datFN("lindist.dat",
                    dir="linearize/%(an)s%(dy)s0%(ep)d" % {
                        "dy": sday,
                        "ep": (ep + 1),
                        "an": anim2
                    },
                    create=True)
    _N.savetxt(fn, lindist, fmt="%.3f")
    fn = _edd.datFN("cp_lr.dat",
                    dir="linearize/%(an)s%(dy)s0%(ep)d" % {
                        "dy": sday,
                        "ep": (ep + 1),
                        "an": anim2
                    })
    _U.savetxtWCom(
        fn, cp_lr, fmt="%d %d",
        com=("# N=%d.  1st column time, 2nd column - LR value from this time until time in next row" % N))
    fn = _edd.datFN("cp_inout.dat",
                    dir="linearize/%(an)s%(dy)s0%(ep)d" % {
                        "dy": sday,
                        "ep": (ep + 1),
                        "an": anim2
                    })
    _U.savetxtWCom(
        fn, cp_inout, fmt="%d %d",
        com=("# N=%d.  1st column time, 2nd column - inout value from this time until time in next row" % N))
    fn = _edd.datFN("lin_lr_inout.dat",
                    dir="linearize/%(an)s%(dy)s0%(ep)d" % {
                        "dy": sday,
                        "ep": (ep + 1),
                        "an": anim2
                    })
    _N.savetxt(fn, lin_lr_inout, fmt="%.3f")
    """
    """
    t0 = 0
    winsz = 1000
    t1 = 0
    iw = -1
    while t1 < N:
        iw += 1
        t0 = iw * winsz
        t1 = (iw + 1) * winsz if (iw + 1) * winsz < N else N - 1
        #btwnfigs(anim2, day, ep, t0, t1, inout, [-1.1, 1.1], seg_ts+1, [0.9, 5.1], r, 1, 2, scxMin, scxMax, scyMin, scyMax)
        btwnfigs(anim2, day, ep, t0, t1, inout, "INOUT", [-1.1, 1.1], lr, "LR",
                 [-1.1, 1.1], lin_lr_inout, "lin_lr_inout", [-6.1, 6.1], r, 1,
                 2, scxMin, scxMax, scyMin, scyMax)
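
The closed-form xcs/ycs expressions in the loop above are the orthogonal projection of (x0, y0) onto each segment's supporting line a*x + b*y + c = 0. A worked check (the line y = x is a hypothetical stand-in for one maze segment):

a, b, c = 1., -1., 0.    # the line x - y = 0, i.e. y = x
x0, y0  = 2., 0.
xc = ( b * (b * x0 - a * y0) - a * c) / (a * a + b * b)
yc = (-a * (b * x0 - a * y0) - b * c) / (a * a + b * b)
# (xc, yc) == (1.0, 1.0): the foot of the perpendicular from (2, 0) onto y = x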