def test_convergence():
    prm = deepcopy(dft_prm)
    prm['n']['shape'] = (300,)
    prm['n']['death'] = 10**-14
    prm['growth']['std'] = 0  # 10**-14
    kwargs = {}
    m = Model(parameters=prm, dynamics=dft_dyn, **kwargs)
    # code_debugger()
    tmax = 5000.
    m.evol(print_msg=1, tmax=tmax, tsample=tmax / 30., converge='force')
    trial = 0
    while np.sum(m.results['n'][-1] > 10**-10) < 2:
        print trial
        m.evol(print_msg=0, tmax=tmax, tsample=tmax / 30., converge='force')
        trial += 1
    plot(m.results['n'].index, m.results['n'].matrix, log='xy', hold=1)
    code_debugger()
    m.evol(print_msg=1, tmax=tmax, reseed=0, tsample=tmax / 30., converge=0)
    plot(m.results['n'].index, m.results['n'].matrix, log='xy', hold=1,
         marker='o', linestyle='None')
    plt.show()
    code_debugger()
def continuum_cavity_output(prm, **kwargs):
    dic = {}
    dic.update(kwargs)
    dic.update(prm)
    Stot = prm['n_shape'][0]
    if kwargs.get('groups', 1):
        resolution = dic.get('resolution', 10)
        rank = dic['ranks'] = np.linspace(dic.get('ranks_min', 0),
                                          dic.get('ranks_max', 1), resolution)
        fnc = continuum_func
        functions = {}
        conv = {'avgK': 'mean_k', 'varK': 'sigma_k'}
        fnconv = {'varK': np.sqrt, 'sigma': np.sqrt}
        for n in ('S', 'avgK', 'varK', 'mu', 'sigma', 'gamma'):
            functions[n] = fnc(dic[n + 'prm'], **dict(dic.get(n + 'opt', ())))
            if n in ('mu', 'sigma', 'gamma'):
                ranks = np.tile(rank, (len(rank), 1))
                xs = (ranks.T, ranks)
            else:
                xs = [rank]
            dic[conv.get(n, n)] = fnconv.get(n, lambda x: x)(functions[n](*xs))
        dic['S'] /= np.sum(dic['S']) / Stot
        Smean = np.mean(dic['S'])
        dic['mu'] *= Smean
        dic['sigma'] *= np.sqrt(Smean)
        # return group_cavity_solve_props(**dic)
        # print dic
        N1, N2, V, Phi, success = group_cavity_solve(**dic)
        # from datatools import plot, scatter
        # plot(rank, N1 * Phi)
        plot(N1, N2, Phi, xs=rank, legend=['N1', 'N2', 'Phi'], hold=1)
        N1, N2, V, Phi = [inter.interp1d(rank, x) for x in (N1, N2, V, Phi)]
        # continuum_cavity_solve(**dic)
        for n in functions:
            dic[n] = functions[n]
        res = continuum_calc_props(N1=N1, N2=N2, V=V, Phi=Phi, **dic)
    else:
        # A bare `raise` here has no active exception to re-raise; make the
        # unsupported branch explicit instead.
        raise NotImplementedError("continuum_cavity_output requires groups")
    return res
def show_hist(axes=None, values=None, **kwargs):
    idx, binned = data_to_matrix(axes=axes, values=values, mode='bin', **kwargs)
    if kwargs.get('newfig', 1):
        plt.figure()
    from datatools import mhist
    nbpanels = len(values)
    panel = MutableInt(0)
    dico = kwargs.get('dictionary', {})

    def get_dico(val):
        return dico.get(val, val)

    results = {}
    for val in values:
        if nbpanels > 1:
            auto_subplot(plt, nbpanels, panel)
        plt.xlabel(get_dico(val))
        plt.ylabel('Frequency')
        if kwargs.get('split_by', 0):
            raise Exception("Not done yet")
        else:
            coords = []
            legends = []
            for i, binn in zip(idx, binned):
                print 'showhist', i, len(binn), len(binn[val])
                lst = [ll for l in binn[val] for ll in l]
                # for lst in binn[val]:
                #     print len(lst)
                xs, ys = mhist(lst, bins=kwargs.get('bins', 30))
                coords.append((xs, ys))
                legends.append('{}'.format(get_dico(i)))
            for c in coords:
                plot(c[0], c[1], hold=1, linestyle='None', marker='o',
                     **{k: kwargs[k] for k in kwargs if k in ('log',)})
            plt.legend(legends)
        results[val] = coords
    return results
def test_tsample():
    # NOTHING DEPENDS ON TSAMPLE IN DETERMINISTIC DYNAMICS
    prm = deepcopy(dft_prm)
    kwargs = {}
    m = Model(parameters=prm, dynamics=dft_dyn, **kwargs)
    tmax = 5000
    m.evol(print_msg=1, tmax=tmax, tsample=tmax / 100)
    traj = m.results['n']
    plot(traj.index, traj.matrix, hold=1, log='y')
    m.evol(print_msg=1, tmax=tmax, tsample=tmax / 10, reseed=0)
    traj = m.results['n']
    plot(traj.index, traj.matrix, hold=1, linestyle='None', marker='o')
    plt.show()
def analyze_trajectory(model, hold=0, log='y'):
    if isinstance(model, basestring):
        from models import BaseModel
        m = BaseModel.load(model, initialize=False)
    else:
        m = model
    from datatools import plot, plt
    for var in m.results:
        plt.figure()
        res = m.results[var].matrix
        plot(np.array([res[t].ravel() for t in range(res.shape[0])]),
             hold=1, xs=m.results[var].index, log=log, title=var)
    if not hold:
        plt.show()
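
# Hedged usage sketch for analyze_trajectory: it accepts either a live Model
# or a path to a saved model.  This relies on the same module-level defaults
# (dft_prm, dft_dyn) used by the tests in this file; illustration only, not
# called anywhere.
def _example_analyze_trajectory():
    m = Model(parameters=deepcopy(dft_prm), dynamics=dft_dyn)
    m.evol(tmax=100., tsample=10.)
    analyze_trajectory(m, hold=0, log='y')   # one figure per result variable
    # analyze_trajectory('path/to/saved/model')  # alternatively, load from disk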
def test_noise():
    # NOISE EFFECT IS THOROUGHLY INDEPENDENT OF tsample EVER SINCE I SWITCHED TO dop853
    prm = deepcopy(dft_prm)
    prm['nnoise'] = {
        'type': 'noise',
        'variables': ['n'],
        'amplitude': 1,
        'sign': 1,
        'role': 'noise',
        'dynamics': 'noise',
        'rank': 'full',
        # 'direction': 'random',
    }
    prm['n']['shape'] = (3,)
    dyn = deepcopy(dft_dyn)
    dyn['noise'] = {
        'type': 'noise',
        'variables': [('n', 'com'), ],
    }
    kwargs = {}
    m = Model(parameters=prm, dynamics=dyn, **kwargs)
    tmax = 20
    nstep = 10
    m.evol(print_msg=1, tmax=tmax, tsample=float(tmax) / nstep / 10.)
    from datatools import plt
    traj = m.results['n']
    noise = m.data['nnoise']
    plt.subplot(121)
    plot(traj.index, traj.matrix, hold=1, log='y')
    plt.subplot(122)
    plot(noise.index, noise.matrix[:, :2], hold=1, log='y')
    m.evol(print_msg=1, tmax=tmax, tsample=float(tmax) / nstep, reseed=0)
    traj = m.results['n']
    noise = m.data['nnoise']
    plt.subplot(121)
    plot(traj.index, traj.matrix, hold=1, linestyle='None', marker='o')
    plt.subplot(122)
    plot(noise.index, noise.matrix[:, :2], hold=1, log='y',
         linestyle='None', marker='o')
    plt.show()
def measure_cascade(model, measure, prefix='n_groups_', **kwargs): print 'MEASURING TROPHIC CASCADES' r = model.data['growth'].matrix.copy() Aij = model.find_data(role='interactions').matrix.copy() if 'selfint' in model.data: D = model.data['selfint'].matrix.copy() else: D = np.ones(r.shape) if np.max(D) == 0: D[:] = 1 Aij = (Aij.T / D).T r /= D Nf = model.results['n'][-1].copy() alive = (Nf > 10**-5 * np.mean(Nf)) #measure['n_death']*1.1 ) #First compute jacobian Alive = Aij[np.ix_(alive, alive)] Slive = np.sum(alive) Nlive = Nf[alive] Dlive = D[alive] B = (Alive - np.eye(Slive)) # B2=((Alive.T/Dlive).T-np.eye(Slive)) from sicpy.linalg import inv DN = np.diag(Nlive) diag = np.diag(Nlive * Dlive) J = np.dot(diag, B) invJ = inv(J) Press = -invJ PressRel = -inv(B) # print J2 tscore = measure['trophic_scores'].copy() #Then get matrix of variances from scipy.linalg import solve_continuous_lyapunov as solve_lyapunov, norm, inv Cij = solve_lyapunov(J, -DN) var = 'n' measure['{}_matrix_press'.format(var)] = norm(invB, ord='fro')**2 / Slive measure['{}_matrix_var'.format(var)] = 2 * np.trace(Cij) / Slive print 'lyap', Cij #then get average of jacobian for species with Delta T ~ 1 and Delta T ~ 2 Var = variance_interactions(J, lift=1) Var /= np.abs(np.mean(np.diag(Var))) srcs = [('tscore', tscore)] niche = None if 'niche' in model.data: niche = model.data['niche'].matrix.copy() srcs += [('niche', niche)] print 'Var', Var print 'Press', Press print tscore print niche NF = Nf.copy() for tgt in range(len(tscore)): print '\n Hit ', tgt test = np.zeros(tscore.shape) test[tgt] = 1 print 'PRESS', np.dot(Press, np.dot(DN, test)) if 1: # THIS WORKS! m2 = model.copy(initialize=False) # m2.data['growth'].matrix+=np.dot(DN,test)*0.05 m2.parameters['immigration'] = { 'type': 'matrix', 'variables': ['n'], 'dynamics': 'nlv', 'role': 'influx', 'matrix': 0.05 * Nf[tgt] * test } m2.initialize(labels=['immigration']) m2.make_dynamics() m2.evol(converge=1, tmax=1000000, tsample=.1, print_msg=0, reseed=False, extend=True) print 'EMPIRICAL PRESS', (m2.results['n'][-1] - NF) / 0.05 print 'VAR\n', np.diag(solve_lyapunov(J, -DN * test)) if 0: plt.subplot(131) plt.imshow((solve_lyapunov(J, -DN * test))) plt.colorbar() plt.title('Theory') m3 = model.copy(initialize=False) if 1: m3.parameters['nnoise'] = { 'type': 'noise', 'variables': ['n'], 'amplitude': Nf[tgt] * 0.05, 'role': 'noise', 'dynamics': 'noise', 'rank': 1, 'direction': tgt, } m3.dynamics['noise'] = { 'type': 'noise', 'variables': [ ('n', 'com'), ], } m3.initialize(labels=['nnoise']) m3.make_dynamics() m3.evol(converge=0, tmax=20., tsample=0.1, print_msg=1, extend=True, reseed=False) # print m3.results['n'].matrix from datatools import plot plt.subplot(133) plot([ np.var(m3.results['n'].matrix[:i, tgt] / 0.05) for i in range(1, len(m3.results['n'].index)) ], hold=1 ) #m3.results['n'].matrix,xs=m3.results['n'].index ,hold=1) print 'EMPIRICAL VAR\n', np.diag( np.cov(m3.results['n'].matrix.T / 0.05)) plt.subplot(132) plt.imshow(np.cov(m3.results['n'].matrix.T / 0.05)) #/Nlive.reshape(len(Nlive),1))) plt.title('Empirical') plt.colorbar() # plt.subplot(133) # plt.imshow((solve_lyapunov(J, -DN * test))/(np.cov(m3.results['n'].matrix.T/Nlive.reshape(len(Nlive),1))) ) # plt.title('Ratio') # plt.colorbar() plt.show() #print np.add.outer(tscore[alive],-tscore[alive]) for dref, s, v, mode in iproduct((1, 2), srcs, [('press', Press), ('var', Var)], ('', 'loc')): lab, src = s lval, val = v dist = np.add.outer(-src[alive], src[alive]) std = 1. / 5. 
if mode == 'loc': #Include only the levels right below cyc = 100000. else: #Go down the whole chain cyc = 2. res = (val * np.exp(-(np.abs(dist - dref) % cyc)**2 / (2 * std**2)))[dist > 0] #/np.sqrt(2*np.pi*std**2 ) #res[dist<=0]=0 if not res.shape: res = [0] #print lab,lval,dref,mode, res measure['n_cascade{}_{}_{}_{}'.format(mode, lab, lval, dref)] = np.sum(res) measure['n_cascade{}_{}_{}_{}_std'.format( mode, lab, lval, dref)] = np.std(res) * np.sqrt(len(res)) if 0 and lval == 'press' and lab == 'tscore' and dref == 1 and mode == '': from datatools import scatter3d #plt.subplot(121) #scatter(niche,tscore,hold=1) #plt.subplot(122) # kwargs['TMP'](model,measure,hold=1) plt.figure() val[dist <= 0.1] = 0 print 'Nf=', Nf pos = np.add.outer(src[alive], np.zeros(len(src[alive]))) scatter3d(pos[val != 0], dist[val != 0], val[val != 0], hold=1) plt.xlabel('pos') plt.ylabel('dist') plt.show() if dref == 1: resinv = val * (np.abs(dist) % cyc) measure['n_cascdist{}_{}_{}_pos'.format( mode, lab, lval)] = np.sum(resinv[val > 0]) / np.sum(val[val > 0]) measure['n_cascdist{}_{}_{}_neg'.format( mode, lab, lval)] = np.sum(resinv[val < 0]) / np.sum(val[val < 0]) if dref == 2: if measure['n_cascade{}_{}_{}_1'.format(mode, lab, lval)] < 0: measure['n_cascade{}_{}_{}'.format( mode, lab, lval)] = measure['n_cascade{}_{}_{}_2'.format( mode, lab, lval)] / measure['n_cascade_{}_{}_1'.format( lab, lval)] else: measure['n_cascade{}_{}_{}'.format(mode, lab, lval)] = 0
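
# The two community-level responses used in measure_cascade can be checked on
# a toy Jacobian: the long-term response to a sustained small influx is
# -J^{-1} applied to that influx, and the stationary covariance of
# fluctuations under white noise solves the Lyapunov equation
# J C + C J^T = -Q.  A minimal, self-contained sketch with toy numbers (not
# the model's actual parameters); illustration only.
def _example_press_and_variance():
    import numpy as np
    from scipy.linalg import solve_lyapunov, inv
    J = np.array([[-1.0, 0.3],
                  [-0.4, -1.2]])      # toy, stable community Jacobian
    press = -inv(J)                   # long-term shift per unit sustained press
    Q = np.diag([0.05, 0.05])         # toy noise covariance
    C = solve_lyapunov(J, -Q)         # stationary covariance of fluctuations
    return press, C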
def measure_gen(model, measure, typ='usual', **kwargs): '''Function called by most other measures''' model = model.copy(initialize=False) prm = model.parameters for var in model.results: #GENERAL SETUP comm = [ i for i, j in prm.iteritems() if i in model.data and j.get('role', None) == 'interactions' and var in j['variables'][0] ] if not comm: continue comm = comm[0] growth = [ i for i, j in prm.iteritems() if j.get('role', None) == 'growth' and var in j['variables'][0] ][0] A = model.get_labeled_data(comm) r = model.get_labeled_data(growth) axis = A.axes[0] try: capa = [ i for i, j in prm.iteritems() if j.get('role', None) == 'capacity' and var in j['variables'][0] ][0] K = model.get_labeled_data(capa) except: capa = [ i for i, j in prm.iteritems() if j.get('role', None) == 'diagonal' and var in j['variables'][0] ][0] if not capa in model.data: model.initialize() D = model.get_labeled_data(capa) K = r / D K.matrix = np.clip(K.matrix, 0, 100) measure['{}_capacity_std'.format(var)] = np.std(K.matrix) measure['{}_capacity_mean'.format(var)] = np.mean(K.matrix) measure['{}_selfint_std'.format(var)] = np.std(D.matrix) try: traj = model.results[var] Nf = LabeledArray(traj.matrix[-1], axes=prm[var]['axes']) except: code_debugger() othax = tuple(i for i, a in enumerate(prm[var]['axes']) if not a == axis[-1]) death = kwargs.get('death', prm[var].get('death', 0)) if othax: alive = np.where(np.sum(Nf.matrix > death, axis=othax))[0] else: alive = np.where(Nf.matrix > death)[0] #print alive, Nf,np.sum(Nf.matrix>death,axis=othax),othax,axis,prm[var]['axes'] Nlive = Nf.matrix.mean(othax)[alive] nlive = Nlive / np.mean(Nlive) def idxs(data): return np.ix_(*[alive if ax == axis else [0] for ax in data.axes]) S = A.matrix.shape[0] Alive = A[idxs(A)] rlive = r[idxs(r)] Klive = K[idxs(K)] Slive = len(alive) dx = model.get_dx(traj.index[-1], model.get_labeled_result( idx=-1))[var].matrix / Nf.matrix Slivestrict = S - np.sum(dx < -10**-8) #SPECIFIC MEASURES if 'abundance' in typ: measure['{}_abundance'.format(var)] = tuple(Nf.matrix.ravel()) if 'usual' in typ: '''Typical measurements: biomass, lyapunov function, number of species alive''' ax = prm[var]['axes'].index(axis[-1]) measure['{}_#alive'.format(var)] = Slive measure['{}_%alive'.format(var)] = Slive / float( traj.shape[1 + ax]) measure['{}_%alive_strict'.format(var)] = Slivestrict / float( traj.shape[1 + ax]) #print traj.shape[1+ax], len(alive) measure['{}_biomass'.format(var)] = np.mean(traj[-1].sum(othax)) measure['{}_biomass2'.format(var)] = np.mean( (traj[-1]**2).sum(othax)) measure['{}_biomass*'.format(var)] = measure['{}_biomass'.format( var)] / measure['{}_%alive'.format(var)] measure['{}_biomass_tot'.format(var)] = np.sum(traj[-1]) measure['{}_biomass_std'.format(var)] = np.std(traj[-1].sum(othax)) measure['{}_simpson'.format(var)] = np.sum( traj[-1].sum(othax)** 2) / measure['{}_biomass_tot'.format(var)]**2 measure['{}_biomass_relstd'.format( var)] = measure['{}_biomass_std'.format(var)] / measure[ '{}_biomass'.format(var)] measure['{}_productivity'.format(var)] = np.sum(r.matrix * traj[-1]) measure['{}_productivity_ratio'.format( var)] = measure['{}_productivity'.format(var)] / measure[ '{}_biomass_tot'.format(var)] measure['{}_diff'.format(var)] = np.median(traj[-1].std(othax)) #J= np.dot(np.diag(nlive), ) if 'degree' in typ: mat = A.matrix.copy() indeg = np.sum(mat != 0, axis=1) outdeg = np.sum(mat != 0, axis=0) measure['n_degree'] = np.mean(indeg) measure['n_degree_std'] = np.std(indeg) measure['n_alive_degree'.format(comm)] = np.mean( 
np.sum((A.matrix[np.ix_(alive, alive)] != 0), axis=1)) if 'effective' in typ: '''Measure effective value of interactions''' prefix = '{}_'.format(var) try: rescaled_int = (A / D).matrix.copy() except: rescaled_int = (A * K / r).matrix.copy() matrix = A.matrix.copy() S = matrix.shape[0] if not np.isnan(rescaled_int).any(): measure['n_connectivity'] = np.sum( np.abs(rescaled_int) + np.abs(rescaled_int.T) > 10**-15 ) * 1. / (rescaled_int.size - S) #print measure['n_connectivity'] else: measure['n_connectivity'] = np.sum( np.abs(matrix) > 10**-15) * 1. / (matrix.size - S) np.fill_diagonal(matrix, np.nan) np.fill_diagonal(rescaled_int, np.nan) mats = [matrix, rescaled_int] if 'finalprm' in typ: matlive = Alive.copy() np.fill_diagonal(matlive, np.nan) mats.append(matlive) for mat, pref in zip(mats, [ prefix + 'interactions_', prefix + 'couplings_', prefix + 'couplings_final_' ])[1:]: effS = np.sum( (np.abs(mat) + np.abs(mat.T)) != 0) * 1. / mat.shape[ 0] #*measure['n_connectivity'] ref = mat.copy() ref[np.isnan(ref)] = 0 mat = mat.copy() #mat[(np.abs(mat)+np.abs(mat).T) ==0]=np.nan #Ignore missing edges effS = mat.shape[0] mat2 = mat.copy() #mat2[(mat) ==0]=np.nan #Ignore missing edges if not 'threshold' in model.data and 0: #Remove edges that are too large for i in range(mat.shape[0]): if (mat[i] < -1).any(): mat[i, :] = np.nan mat[:, i] = np.nan measure[pref + 'mean'] = mu = np.mean(nonan(mat2)) measure[pref + 'std'] = std = np.std(nonan(mat2)) measure[pref + 'row_std'] = np.std(np.sum( ref, axis=1)) # np.std([np.mean(nonan(mat[i])) # for i in range(S) if len(nonan(mat[i]))] ) colstd = np.std(ref - np.mean(ref, axis=1), axis=1) measure[pref + 'col_std'] = np.std(colstd) * np.sqrt(effS) measure[pref + 'col_sigma'] = np.mean(colstd) * np.sqrt(effS) measure[pref + 'rowcol'] = np.mean(colstd * np.sum(ref, axis=1)) tmp = np.abs(ref) + np.abs(ref.T) Nnei = np.dot((tmp != 0), Nf.matrix) / np.sum(tmp != 0, axis=1) measure[pref + 'n_std'] = np.std(Nnei) / np.mean(Nnei) measure[pref + 'n_mean'] = np.mean(Nnei) sym = (np.mean(nonan((mat - mu) * (mat - mu).T))) #-mu**2) #print sym, (np.mean(nonan(mat*mat.T))-mu**2) if 0: measure[pref + 'symmetry'] = sym measure[pref + 'symstd'] = np.std(nonan(mat * mat.T)) mus = [-mu * effS, -np.mean(np.sum(ref, axis=1))] measure[pref + 'mu'] = mus[0] sigs = [ std * np.sqrt(effS), ] measure[pref + 'sigma'] = sigs[0] if std > 0: gams = sym / std**2, #,np.mean(rm*rm.T)/np.mean(rm**2) ] #OPTION 2 IS WRONG: all the missing links contribute to the mean here measure[pref + 'gamma'] = gams[0] else: measure[pref + 'gamma'] = 0 #print ref[0],ref[:,0] #print mus,sigs,gams #print np.sum(ref,axis=1), np.mean( nonan(mat)),np.mean(ref[ref!=0] ) #print comm, measure[prefix+'symmetry'], std**2, mat[0,1],mat[1,0] measure['{}_growth_mean'.format(var)] = np.mean(r.matrix) measure['{}_growth_std'.format(var)] = np.std(r.matrix) measure['{}_growth_not'.format(var)] = np.sum((r.matrix <= 0)) adjusted = K.shape[0] * (K.shape[0] - 1) measure['{}_corr_KA'.format(var)] = np.sum( (-K * A / D).matrix) / adjusted - np.mean( K.matrix) * np.sum(-(A / D).matrix) / adjusted from datatools import hist, scatter if 'removal' in typ: remends = [] from trajectory import Trajectory difs = [] n0 = [] vs = np.zeros((S, S)) try: rescaled_int = (A / D).matrix.copy() except: rescaled_int = (A * K / r).matrix.copy() offdiag = zip(*[(i, j) for i in range(S) for j in range(S) if i != j]) a = -(rescaled_int - np.mean(rescaled_int[offdiag])) / np.std( rescaled_int[offdiag]) / np.sqrt(S) a[np.isnan(a)] = 0 for i in 
range(S): oth = range(S) oth.remove(i) if (not i in alive) or np.abs(Nf.matrix[i]) < 10**-12: remends.append(Nf.matrix[oth]) continue #print "Removing",i mat = Nf.matrix.copy() mat[i] = 0 model.results[var] = Trajectory(init=mat) model.evol(tmax=50000, tsample=50000, reseed=0, keep='endpoint') nothf = Nf.matrix[oth] nothr = model.results[var][-1][oth] remends.append(nothr) difs.append(nothr - nothf) #tmp=a[oth,i]*measure['v']*Nf.matrix[i]/np.mean(Nf.matrix) #scatter(tmp,difs[-1]/np.mean(Nf.matrix)) #vs[oth,i]=(nothf/np.mean(nothf) - nothr/np.mean(nothr) )*np.mean(Nf.matrix)/Nf.matrix[i]/a[oth,i] vs[oth, i] = (nothr - nothf) / Nf.matrix[i] / a[oth, i] vs[a == 0] = 0 #print np.mean(np.abs(a.ravel() ) ) #hist(vs.ravel(),log='y') measure['removal_diff'] = np.mean( [np.sum(np.abs(d)) for d in difs]) / np.mean(Nf.matrix) if (vs != 0).any(): measure['removal_v'] = np.mean( [np.mean(v[v != 0]) for v in list(vs)]) measure['removal_v_std'] = np.std( [np.mean(v[v != 0]) for v in list(vs)]) else: measure['removal_v'] = measure['removal_v_std'] = 0 #print measure['removal_v'],measure['removal_v_std'] model.results[var] = Trajectory(init=Nf.matrix) if 'testcavity' in typ: n0 = [] h = measure['h'] v = measure['v'] q = measure['q'] phi = measure['phi'] sigma = measure['n_couplings_sigma'] mu = measure['n_couplings_mu'] gamma = measure['n_couplings_gamma'] sigma_k = measure['n_capacity_std'] avgN = measure['avgN'] meanN = np.mean(Nf.matrix) sigma_l = (sigma_k / sigma / avgN) l0s = [] try: rescaled_int = (A / D).matrix.copy() except: rescaled_int = (A * K / r).matrix.copy() #np.fill_diagonal(rescaled_int,np.nan) offdiag = zip(*[(i, j) for i in range(S) for j in range(S) if i != j]) a = -(rescaled_int + mu / S) / sigma sums = [] dN = [] difs = [] difnexts = [] n0rem = [] compv = [] for i in range(S): oth = range(S) oth.remove(i) diff = a[oth, i] * v * Nf.matrix[i] / meanN difs.append(diff) diffnext = np.dot(a[oth, :], a[:, i]) * v**2 * Nf.matrix[i] / meanN difnexts.append(diffnext) nbefore = Nf.matrix[oth] / meanN + diff #+diffnext nbefore[Nf.matrix[oth] < 10**-15] = 0 SUM = np.dot(a[i, oth], nbefore) l0 = (K.matrix[i] - measure['n_capacity_mean']) / sigma / meanN l0s.append(l0) u = (1 - mu / S) / sigma ut = (u - gamma * v) res = (h + l0 - SUM) / ut n0.append(res) sums.append(SUM) dd = (K.matrix[i] - Nf.matrix[i] + np.dot(rescaled_int[i, oth], Nf.matrix[oth])) dN.append(dd) if 'removal' in typ: nbefore2 = remends[i] / meanN #scatter(diff,nbefore2-Nf.matrix[oth]/meanN) SUM2 = np.dot(a[i, oth], nbefore2) res2 = (h + l0 - SUM2) / ut n0rem.append(res2) comp = nbefore - nbefore2 compv.append(comp) #scatter(n0,n0rem) n0rem = np.array(n0rem) n0 = np.array(n0) dN = np.array(dN) measure['cavity_alive_truepos'] = np.sum( n0[alive] > 0) * 1. / len(alive) measure['cavity_alive_falseneg'] = np.sum( n0[alive] < 0) * 1. / len(alive) measure['cavity_alive'] = np.sum(n0 > 0) * 1. / n0.shape[0] measure['cavity_dN'] = np.sum(dN > -10**-6) * 1. / n0.shape[0] #print measure['cavity_dN'] measure['cavity_diff'] = np.mean([np.sum(np.abs(d)) for d in difs]) measure['cavity_diffnext'] = np.mean( [np.sum(np.abs(d)) for d in difnexts]) if 'removal' in typ: measure['cavity_v_vs_rem'] = np.mean( [np.sum(np.abs(d)) for d in compv]) measure['cavity_alive_rem'] = np.sum( n0rem > 0) * 1. / n0.shape[0] measure['cavity_meansum'] = np.mean(SUM) measure['cavity_corrsum'] = np.mean( np.array(SUM) * l0) - np.mean(SUM) * np.mean(l0) if 0 and rescaled_int.any(): #PLOTS! 
from datatools import hist, plot, plt, scatter #if dN.any(): #print dN #hist(dN) plt.subplot(224) hist(a[offdiag], hold=1, normed=1, log='y') hist(sums, hold=1, normed=1) plt.subplot(223) Nf.axes = [('n', 'com')] t = ((r - D * Nf + A.dot(Nf, axes=[('n', 'com')])).matrix) / avgN hist(t[alive], hold=1, normed=0, bins=20, log='y') plt.subplot(221) xs = np.linspace(min(Nlive / avgN), max(Nlive / avgN), 100) plt.ylim(ymin=0.0001, ymax=1) hist(Nlive / avgN, hold=1, normed=1, bins=20) var = (q - 1) / phi plot(xs, np.exp(-(xs - 1 / phi)**2 / 2 / var) / np.sqrt(var), log='y', hold=1, title='n+') plt.subplot(222) xs = np.linspace(min(n0), max(n0), 100) plt.ylim(ymin=0.0001, ymax=1) hist(n0, hold=1, normed=1, bins=20, title='n0') #print 'SHOULD HAVE',S,mu,sigma,sigma_k,gamma print 'RECEIVING q v h phi', q, v, h, phi print 'phi', len([z for z in t if z > -10**-16 ]) * 1. / S, len(alive) * 1. / S, phi, len( [z for z in n0 if z > 0]) * 1. / S print '<n>', np.mean(Nf.matrix / avgN), 1 print '<n^2>', np.mean((Nlive / avgN)**2), q print '<n>+', np.mean(Nlive / avgN), 1 / phi print '<n^2>+', np.mean((Nlive / avgN)**2), q / phi print '<n0>', np.mean(n0), h / ut print 'var_n0', np.var(n0), (q + sigma_l**2) / ut**2 print ' (in which q', q / ut**2, 'sigL', sigma_l**2 / ut**2, ')' print 'sig_l', np.std(l0s), sigma_l, 'sig_k', sigma_k, np.std( K.matrix) plot(xs, np.exp(-((ut * xs - h)**2 / 2 / (q + sigma_l**2))) / np.sqrt(q + sigma_l**2), log='y') if 'finalprm' in typ: measure['{}_growth_final_mean'.format(var)] = np.mean(rlive) measure['{}_growth_final_std'.format(var)] = np.std(rlive) measure['{}_growth_final_not'.format(var)] = np.sum((rlive <= 0)) measure['{}_capacity_final_mean'.format(var)] = np.mean(Klive) measure['{}_capacity_final_std'.format(var)] = np.std(Klive) if 'matrix' in typ: def variance(J, D): #Jeff's variance #Jhat= lifted_matrix(J) from scipy.linalg import solve_continuous_lyapunov as solve_lyapunov, norm, inv #tmp=np.zeros(J.shape) #tmp[0,:]=Nlive #res=np.dot(inv(Jhat),tmp.ravel() ) #print '1',res.reshape(J.shape)[:5,:5] #tmp=np.zeros(J.shape) #tmp[:,0]=Nlive #res=np.dot(inv(Jhat),tmp.ravel() ) #print '2',res.reshape(J.shape)[:5,:5] res = solve_lyapunov(J, -np.dot(D, D)) #print 'X',(res)[:5,:5] return np.trace(res) / Slive Dlive = rlive / Klive Dlive[Klive == 0] = 1 #DP=(r/K).matrix #DP[r.matrix==0]=1 if 0: B = Alive - np.eye(Slive) * Dlive #BP=A.matrix-np.eye(S)*DP else: B = (Alive.T / Dlive).T - np.eye(Slive) D = np.diag(Nlive) J = np.dot(D, B) #2*variance(J,D**(1./2) ) #raise from scipy.linalg import solve_continuous_lyapunov as solve_lyapunov, norm, inv from scipy.sparse.linalg import eigs if Slive > 0: #measure['{}_empirical_press'.format(var)]=np.linalg.norm(np.linalg.inv(B),ord='fro')**2/Slive try: measure['{}_matrix_press'.format(var)] = norm( inv(B), ord='fro')**2 / Slive measure['{}_matrix_var'.format(var)] = 2 * variance( J, D**(1. 
/ 2)) except Exception as e: print e Slive = 0 #print CIJ[:3,:3] if Slive == 0: #measure['{}_empirical_press'.format(var)]=0 for suffix in ('press', 'var', 'eig', 'symeig'): measure['{}_matrix_{}'.format(var, suffix)] = 0 elif 0: CIJ = solve_lyapunov(J, -D) from datatools import scatter, plot, hist prefix = 'n_couplings_' mu = measure[prefix + 'mu'] sigma = measure[prefix + 'sigma'] gamma = measure[prefix + 'gamma'] a = (-Alive - mu / S) / sigma ut = measure['utilde'] n = Nlive np.fill_diagonal(a, 0) AA = ((a**2).T * (Nlive / np.add.outer(Nlive, Nlive))).T AG = ((a * a.T) * (Nlive / np.add.outer(Nlive, Nlive))) DEN = ut**2 - np.sum(AG, axis=1) RESULT = np.dot(inv(np.diag(DEN) - AA), ut / 2 * np.ones(Slive) / sigma) #/Slive if 0: #plot(np.diag(CIJ), np.diag(CIJ),hold=1) #scatter( np.diag(CIJ) ,(ut/2/sigma + np.dot(AA,np.diag(CIJ) ) ) /DEN ,hold=1,log='xy') CIJ2 = CIJ.copy() np.fill_diagonal(CIJ2, 0) test = np.dot(a, CIJ2) np.fill_diagonal(test, 0) print 'a.(C-diagC),', test[:3, :3], np.mean(test) * Slive print 'a.C', np.dot( a, CIJ)[:3, :3], np.mean(np.dot(a, CIJ)) * Slive print 'mean offdiag', np.mean(CIJ2), 'diag', np.mean( np.diag(CIJ)) print 'diag*u', np.mean(np.diag(CIJ)) * ut #hist(CIJ2[CIJ2!=0],log=1) #plot(np.diag(CIJ), np.diag(CIJ),hold=1) #scatter( np.sum(DEN*np.diag(CIJ)) ,np.sum(ut/2/sigma + np.dot(AA,np.diag(CIJ) ) ) ,hold=1,log='xy') #DEN2=ut**2 - np.sum(AG,axis=1) #scatter( np.sum(DEN2*np.diag(CIJ)) ,np.sum(ut/2/sigma + np.dot(AA,np.diag(CIJ) ) ) ,color='r',hold=1,log='xy') measure['{}_pred_var'.format(var)] = np.mean( (ut / 2 / sigma + np.dot(AA, np.diag(CIJ))) / DEN) * 2 #np.mean(RESULT)*2 print '{}_pred_var'.format(var), measure['{}_pred_var'.format( var)], measure['variability2'] if 'matrix_eig' in typ: try: measure['{}_matrix_eig'.format(var)] = eigs(J, k=1, sigma=0)[0][0].real measure['{}_pool_symeig'.format(var)] = eigs( (BP + BP.T) / 2., k=1, sigma=0)[0][0].real except: pass #test= np.sum( #np.linalg.inv(B)**2)/Slive #assert abs(test / measure['{}_empirical_chi2'.format(var)] -1) <0.001 if 'corr' in typ: prefix = '{}_interactions_corr_'.format(var) def corr(form): return np.mean(nonan(np.einsum(form + '->ijk', mat, mat))) measure[prefix + 'ijkj'] = (corr('ij,kj') - mu**2) / std**2 measure[prefix + 'ijik'] = (corr('ij,ik') - mu**2) / std**2 measure[prefix + 'ijjk'] = (corr('ij,jk') - mu**2) / std**2 if 'trophic' in typ: '''Trophic score and fraction of basal species. 
NB: should I decide basal status from absence of outgoing trophic links, or from nonzero carrying capacity in the absence of trophic interactons?''' if not alive.shape[0]: Nlive = np.ones(1) tscore = np.zeros(1) else: tscore = trophic_score(Alive, pops=Nlive, influx=rlive * Nlive) prefix = '{}_trophic_'.format(var) (measure[prefix + 'max'], measure[prefix + 'mean'], measure[prefix + 'std'], measure[prefix + '20'], measure[prefix + '80']) = [ x(tscore) for x in ( #lambda e: np.sum(e==0)/float(len(e)), np.max, np.mean, np.std, lambda e: np.percentile(e, 20), lambda e: np.percentile(e, 80)) ] Asum = Alive.sum(axis=1) niches = [ i for i, j in prm.iteritems() if j.get('role', None) == 'niche' ] if niches: niche = model.data[niches[0]].matrix[alive] measure[prefix + 'niche'] = np.corrcoef(niche, tscore)[0, 1] measure[prefix + 'weighted_mean'] = np.sum(tscore * Nlive) / np.sum(Nlive) measure[prefix + 'corr_r'] = np.mean( tscore * rlive) / (np.mean(tscore) * np.mean(rlive)) - 1 measure[prefix + 'corr_N'] = np.mean( tscore * Nlive) / (np.mean(tscore) * np.mean(Nlive)) - 1 measure[prefix + 'corr_A'] = np.mean( tscore * Asum) / (np.mean(tscore) * np.mean(Asum)) - 1 #print Asum, measure[prefix+'corr_A'] if kwargs.get('trophic_score_pool', 0): #DEACTIVATE FOR FASTER MEASUREMENTS pool_tscore = trophic_score(A.matrix, influx=r.matrix) measure[prefix + 'pool_max'] = np.max(pool_tscore) measure[prefix + 'pool_mean'] = np.mean(pool_tscore) measure[prefix + 'basal'] = np.sum(tscore < 1.1) / float(len(tscore)) if 'assembly_corr' in typ: prefix = '{}_corr_'.format(var) mask = np.ones(Alive.shape, dtype=bool) np.fill_diagonal(mask, 0) meanAlive = np.mean(Alive[mask]) #meanK=np.mean(Klive) #meanNf=np.mean(Nf[alive] ) #print meanA.shape, meanK.shape, Alive.shape,Klive.shape,(Alive*Klive).shape,np.mean((Alive*Klive)[mask]).shape if abs(meanAlive) < 10**-5: meanAlive = 1 measure[prefix + 'AK'] = 0 measure[prefix + 'AN'] = 0 else: measure[prefix + 'AK'] = np.mean( (Alive.T * Klive)[mask]) #/meanAlive/meanK -1 measure[prefix + 'AN'] = np.mean( (Alive.T * nlive)[mask]) #/meanAlive/meanNf-1
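
# Convention check for the 'effective' measurements in measure_gen: mu, sigma
# and gamma are the usual disordered-LV rescalings of the off-diagonal
# interactions (mu = -S * mean, sigma = sqrt(S) * std, gamma = correlation of
# a_ij with a_ji), matching compute() in test_smallS below.  Standalone sketch
# on an arbitrary square matrix; the name is illustrative, not part of the
# measurement pipeline.
def _example_effective_params(A):
    import numpy as np
    S = A.shape[0]
    off = ~np.eye(S, dtype=bool)          # mask selecting off-diagonal entries
    a, at = A[off], A.T[off]              # paired (a_ij, a_ji) values
    mu = -np.mean(a) * S
    sigma = np.std(a) * np.sqrt(S)
    gamma = np.corrcoef(a, at)[0, 1] if np.std(a) > 0 else 0.
    return mu, sigma, gamma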
def test_smallS(nsys=20000): def compute(mode, pop): def offdiag(mat): diag = np.eye(mat.shape[0]) return mat[diag == 0] if mode in ('zeta', 'K'): pop = [p[1] for p in pop] list1 = np.concatenate(pop) else: pop = [p[0] for p in pop] list1 = np.concatenate([offdiag(p) for p in pop]) if mode == 'gamma': list2 = np.concatenate([offdiag(p.T) for p in pop]) return np.corrcoef(list1, list2)[0, 1] if mode == 'sigma': return np.std(list1) * np.sqrt(pop[0].shape[0]) if mode == 'mu': return -np.mean(list1) * (pop[0].shape[0]) if mode == 'zeta': return np.std(list1) if mode == 'K': return np.mean(list1) Ss = [2, 3, 4, 8, 16] table = [] models = {} populations = {} for S in Ss: populations[S] = [] models[S] = [] loctable = [] for sys in range(max(1, nsys / S)): print S, sys prm = deepcopy(dft_prm) kwargs = {} kwargs['n_shape'] = (S, ) kwargs['community_mean'] = .3 kwargs['community_std'] = .4 kwargs['community_symmetry'] = .9 m = Model(parameters=prm, dynamics=dft_dyn, **kwargs) #print m.data['community'].matrix tmax = 5000 m.evol(print_msg=0, tmax=tmax, tsample=tmax / 3, converge=1) measure = {} measure.update(m.export_params()) meas2 = deepcopy(measure) models[S].append(m.copy()) measure_gen(m, measure, typ=['usual', 'effective', 'matrix']) measure['S'] = S measure_cavity(m, measure) loctable.append(measure) populations[S].append((m.find_data(role='interactions').matrix, m.find_data(role='growth').matrix)) table += loctable for m, measure in zip(models[S], loctable): # code_debugger() #THEORETICAL PREDICTIONS WITH IMPOSED PARAMETER VALUES prefix = 'n_couplings_' meas2[prefix + 'mu'] = [ kwargs['community_mean'], compute('mu', populations[S]) ][-1] meas2[prefix + 'sigma'] = [ kwargs['community_std'], compute('sigma', populations[S]) ][-1] meas2[prefix + 'gamma'] = [ kwargs['community_symmetry'], compute('gamma', populations[S]) ][-1] meas2['n_capacity_std'] = [ meas2['growth_std'], compute('zeta', populations[S]) ][-1] meas2['n_capacity_mean'] = compute('K', populations[S]) measure_cavity(m, meas2) measure.update({ 'THEO_' + i: j for i, j in meas2.iteritems() if not 'n_' in i }) # code_debugger() table = pd.DataFrame(table) df = table.groupby('S').mean() #table=[] #for S in Ss: #measure=df.loc[S].to_dict() #measure['n_shape']=(S,) #measure_cavity(models.pop(0),measure) #table.append(measure) #df=pd.DataFrame(table) comp = { 'phi': 'n_%alive', 'avgN': 'n_biomass', 'stdN': 'n_biomass_std', 'stdN': 'n_biomass_std', 'n_couplings_gamma': 'n_couplings_gamma', 'n_couplings_sigma': 'n_couplings_sigma', 'n_couplings_mu': 'n_couplings_mu' } sd = table[['S'] + list(set(comp.keys() + comp.values()))].groupby('S').std() dico = {'n_couplings_gamma': 'gamma', 'n_couplings_sigma': 'sigma'} for x in comp: plt.figure() for z in ('gamma', 'sigma', 'mu'): if z in x: res = [compute(z, populations[S]) for S in Ss] plot(Ss, res, color='r', hold=1) if not 'n_' in x: plot(Ss, df['THEO_' + x], hold=1, color='r') from datatools import errorbar errorbar(Ss, df[x], yerr=sd[x], hold=1, title=dico.get(x, x)) scatter(Ss, df[comp[x]], hold=1) plt.show()
def test_cavity_funcresp(): from datatools import * from cavity_conv import * dic = { 'S': 100., 'mu': 10., 'sigma': .3, 'sigma_k': 1., 'gamma': .05, 'Nc': 50., 'avgK': 1. } #S=dic['S'] locals().update(dic) Nc = dic['Nc'] print cavity_solve_props(**dic) N1, N2, v, phi, f = funcresp_cavity_solve(**dic) print N1, N2, v, phi, f #v*phi/2., avgN = N1 * phi q = N2 * phi / avgN**2 h = (dic.get('avgK', 1) / avgN - mu) / sigma vv = v * sigma * phi print q, vv, h, phi print N1 * phi, np.sqrt(N2 * phi - (N1 * phi)**2), N1 * phi * S, N2 / (S * phi * N1**2) u = (1 - mu * 1. / S) res = [] fs = np.linspace(0, 1, 20) for f in fs: utilde = u - gamma * v * (1 - f) * phi * sigma**2 effvar = (1 - f)**2 * phi * sigma**2 * N2 + sigma_k**2 mean = (avgK - mu * (phi * N1 * (1 - f) + f * Nc / S)) / utilde var = effvar / utilde**2 res.append((mean, var)) res = zip(*res) #plot(res[0],res[1],xs=fs) #return res = [] comp = [] Ncs = np.logspace(0, 2, 30) for Nc in Ncs: dic['Nc'] = Nc rs = [] for trials in range(3): r = funcresp_cavity_solve(**dic) rs.append(r) rs = np.array(rs) mean = (avgK - mu * Nc / S) / u if (rs[:, -1] > 0.8).any() and (rs[:, -1] < 0.2).any(): print rs.T[-2:] res.append(np.mean(rs, axis=0)) N1, N2, v, phi, f = res[-1] dic2 = {} dic2.update(dic) dic2['sigma'] *= max(0.001, (1 - f)) dic2['gamma'] /= np.clip((1 - f), np.abs(dic2['gamma']), 1.) dic2['mu'] *= (1 - f) + f * Nc / S / N1 / phi c = cavity_solve_props(**dic2) comp.append( (c['avgN'], c['q'], c['v'] / dic['sigma'] / c['phi'], c['phi'])) res = np.array(zip(*res)) comp = np.array(zip(*comp)) plot(res[0] * res[3], comp[0], xs=Ncs, hold=1) plt.figure() plot(res[1] / res[0]**2 / res[3], comp[1], xs=Ncs, hold=1) plt.figure() plot(res[2], comp[2], xs=Ncs, hold=1) plt.figure() plot(res[3], comp[3], xs=Ncs, hold=1) plt.show() titles = ['N1', 'N2', 'v', 'phi', 'f'] for r in res: plt.figure() plt.title(titles.pop(0)) plot(Ncs, r, hold=1) plt.show()
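
# Conversion used in test_cavity_funcresp between the funcresp_cavity_solve
# outputs (N1, N2, v, phi) and the standard cavity observables:
# avgN = N1*phi, q = N2*phi/avgN**2, h = (avgK/avgN - mu)/sigma, and the
# rescaled response v*sigma*phi.  Small helper sketch with an illustrative
# name, not part of the module.
def _example_cavity_observables(N1, N2, v, phi, mu, sigma, avgK=1.):
    avgN = N1 * phi
    q = N2 * phi / avgN**2
    h = (avgK / avgN - mu) / sigma
    return avgN, q, h, v * sigma * phi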
def test_noise_amplitude(**kwargs): #CONCLUSIONS: V ~ 1/2r works but requires very long tmax if r is not too large prm = { 'n': { 'type': 'variable', 'axes': ('com', ), 'death': -10000, 'shape': (1, ), 'mean': 0., 'std': 0, }, 'diag': { 'type': 'matrix', 'variables': ['n'], 'role': 'diagonal', 'mean': -2, 'std': 0, 'dynamics': 'lin', }, # 'community': { # 'type': 'matrix', # 'variables': [('n', 'com'), ('n', 'com')], # 'role': 'interactions', # 'mean': 5., # 'std': .5, # 'symmetry': .5, # 'dynamics': 'lin', # }, 'nnoise': { 'type': 'noise', 'variables': ['n'], 'amplitude': 1, 'role': 'noise', 'dynamics': 'noise', 'rank': 1, 'direction': 0, } } dyn = { 'lin': { 'type': 'linear', 'variables': [ ('n', 'com'), ], }, 'noise': { 'type': 'noise', 'variables': [ ('n', 'com'), ], } } m = Model(parameters=prm, dynamics=dyn, **kwargs) ts = np.linspace(0.02, 3, 50) dt = 0.1 tmax = 30. from datatools import plot import scipy.integrate as scint for mode in (1, ): #MODE = 0 : Hand integration #MODE = 1 : model.evol with pregenerated noise res = [] for t in ts: print t m.data['diag'][:] = -1. / t # print tmax nstep = tmax / dt if mode: m.evol(print_msg=1, tmax=tmax, tsample=dt, death=-10000, dftol=-10000) traj = m.results['n'].matrix else: m.data['nnoise'].generate(0, 2 * tmax, dt) noise = np.array([ m.data['nnoise'].get(t).matrix for t in np.linspace(0, tmax, nstep) ]) # xs=m.results['n'].index # plot(traj,xs=xs,hold=1) xs = np.linspace(0, tmax, nstep)[1:] traj = [0] t0 = 0 def dxx(t, x): # print m.data['nnoise'].get(t)[0] return m.data['diag'][0] * x + np.random.normal( 0, 1) / np.sqrt(dt / 100) for t in xs[1:]: x = traj[-1] for tt in np.linspace(t0, t, 100): x += dxx(tt, x) * (t - t0) / 100. traj.append(x) t0 = t # plot(traj,xs=xs) # print ' RATIO ', np.var(traj),np.var(noise) res.append([np.var(traj) * 2 * np.abs(m.data['diag'][0]) ]) #,np.var(noise)]) res = np.array(res) plot(res, xs=ts, log='y', hold=1) plt.show()
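
# The "V ~ 1/(2r)" conclusion noted in test_noise_amplitude is the
# Ornstein-Uhlenbeck stationary variance: for dx = -r x dt + dW (unit noise
# intensity), Var(x) tends to 1/(2r).  A minimal Euler-Maruyama check,
# independent of the Model machinery (toy step sizes; as noted above, a long
# tmax is needed when r is small).
def _example_ou_variance(r=2.0, dt=0.01, tmax=2000.):
    import numpy as np
    nstep = int(tmax / dt)
    x = np.zeros(nstep)
    noise = np.random.normal(0., np.sqrt(dt), nstep)   # Wiener increments
    for i in range(1, nstep):
        x[i] = x[i - 1] - r * x[i - 1] * dt + noise[i]
    return np.var(x) * 2 * r    # should be close to 1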
def continuum_interactions(measure, rank, A, K): func = lambda x, a, b, c: continuum_quad(x, a, b, c) hist, dens = np.histogram(rank, bins=50, normed=1) bins = np.array((dens[1:] + dens[:-1])) / 2. Stot = len(rank) Sprm = np.array( [curve_fit(func, bins[hist > 0], np.log(hist[hist > 0] * Stot))[0]]) measure['Sprm'] = Sprm measure['Sopt'] = (('dim', 1), ) S = continuum_func(Sprm, **dict(measure['Sopt'])) Kpos, Kneg = (K > 0), (K <= 0) avgKprm = np.array([ curve_fit(func, np.array(rank), np.log(np.abs(ks)))[0] for ks in ( np.clip(K, 0.00001, None), np.clip(K, None, -0.00001), ) ]) measure['avgKprm'] = avgKprm measure['avgKopt'] = (('sgn', (1, -1)), ('dim', 1)) avgK = continuum_func(avgKprm, **dict(measure['avgKopt'])) varKprm = np.array([ curve_fit(func, np.array(rank[Ax]), np.log( (K[Ax] - avgK(rank[Ax]))**2))[0] for Ax in (Kpos, Kneg) if sum(Ax) ]) measure['varKprm'] = varKprm measure['varKopt'] = (('dim', 1), ) if 0: Rank = sorted(rank) print sinteg.quad(S, Rank[0], Rank[-1]), Stot from datatools import plot, scatter, plt plot(Rank, S(Rank), hold=1) scatter(bins, hist * Stot, xlabel='rank', ylabel="S", title='S') plot(Rank, avgK(Rank), hold=1) scatter(rank, K) func = lambda x, a, bx, cx, by, cy, bxy, cxy: continuum_quad( x, a, bx, cx, by, cy, bxy, cxy) measure['ranks_min'] = np.min(rank) measure['ranks_max'] = np.max(rank) ranks = np.tile(rank, (len(rank), 1)) shape = ranks.shape xs = ranks.ravel() ys = ranks.T.ravel() A[np.abs(A) < 10**-5] = 0 Apos = (A.ravel() > 0) Aneg = (A.ravel() < 0) muprm = np.array([ curve_fit(func, np.array((xs[Ax], ys[Ax])), np.log(np.abs(A).ravel()[Ax]))[0] for Ax in (Apos, Aneg) if np.sum(Ax) > 7 ]) #print mupopt,mumopt mu = continuum_func(muprm, sgn=ifelse(np.sum(Aneg) > 7, -1, 1)) measure['muprm'] = muprm measure['muopt'] = (('sgn', -1), ) if 1: from datatools import scatter3d, plt Ranks = np.tile(sorted(rank), (len(rank), 1)) XS = Ranks YS = Ranks.T if 0: scatter3d(xs[::], ys[::], np.log(np.abs(A).ravel()[::]), hold=1, alpha=0.3, c='r') plt.gca().plot_wireframe( XS, YS, np.log(np.abs(mu(XS.ravel(), YS.ravel()))).reshape(shape)) plt.show() sigs = np.clip((A.ravel() - mu(xs, ys))**2, 10**-5, None) sigprm = np.array([ curve_fit(func, np.array((xs[Ax], ys[Ax])), np.log(sigs[Ax]))[0] for Ax in ((A.ravel() != 0), ) ]) #(Apos,Aneg)]) measure['sigmaprm'] = sigprm sigma = continuum_func(sigprm, sgn=1) if 0: scatter3d(xs[:], ys[:], np.log(sigs[:]), hold=1, alpha=0.3, c='r') plt.gca().plot_wireframe( XS, YS, np.log(sigma(XS.ravel(), YS.ravel())).reshape(shape)) plt.show() gam = (A.ravel() - mu(xs, ys)) * (A.T.ravel() - mu(ys, xs)) / np.sqrt( sigma(xs, ys), sigma(ys, xs)) gammprm = np.array([ curve_fit(func, np.array((xs[Ax], ys[Ax])), np.log(np.abs(gam[Ax])), method='trf', loss='soft_l1')[0] for Ax in (Apos, Aneg) ]) measure['gammaprm'] = gammprm gamma = continuum_func(gammprm, sgn=[-1, -1], mode='exp') if 0: gam = np.clip(gam, -1, 1) #scatter3d(xs[::],ys[::], gam,hold=1,alpha=0.3,c=gam) scatter3d(0, 0, 0, hold=1) plt.gca().plot_wireframe(XS, YS, gamma(XS.ravel(), YS.ravel()).reshape(shape)) plt.show()
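
# The fits in continuum_interactions all follow one pattern: fit a (possibly
# bivariate) quadratic to log-transformed data with scipy's curve_fit, then
# exponentiate through continuum_func.  A minimal univariate sketch of that
# pattern, assuming continuum_quad is an ordinary quadratic a + b*x + c*x**2;
# illustration only.
def _example_log_quadratic_fit(rank, values):
    import numpy as np
    from scipy.optimize import curve_fit
    quad = lambda x, a, b, c: a + b * x + c * x**2
    values = np.asarray(values)
    good = values > 0                                   # log requires positive data
    prm, _ = curve_fit(quad, np.asarray(rank)[good], np.log(values[good]))
    fitted = np.exp(quad(np.asarray(rank), *prm))       # back on the linear scale
    return prm, fitted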
def make_figure(idx, mat, title='', newfig=1, axes=None, values=None, runavg=None, style='heatmap', log='', zrange=None, logcutoff=10**-10, **kwargs): show_labels = kwargs.get('show_labels', 1) if len(values) > 1 or isinstance(values[0], tuple): #print mat.shape,idx.shape for iv, val in enumerate(values): if isinstance(val, tuple): val, prm = val else: prm = {} kw = {} kw.update(kwargs) kw['style'] = style kw['log'] = log kw['zrange'] = zrange kw['title'] = title kw.update(prm) #print val, kw make_figure(idx, mat[[slice(None) for x in mat.shape[:-1]] + [iv]], newfig=newfig, axes=axes, values=[val], **kw) newfig = False return dico = kwargs.pop('dictionary', {}) def get_dico(val): return dico.get(val, val) if newfig: fig = plt.figure() else: fig = plt.gcf() color = kwargs.pop('color', kwargs.get('c', 'b')) if len(axes) == 1: if title: plt.title(title) X = idx Y = mat if runavg: #Running average from datatools import runavg as average X = average(X, runavg) Y = average(Y, runavg) if 'y' in log: X = X[Y.squeeze() > logcutoff] Y = Y[Y > logcutoff] if show_labels: plt.xlabel(get_dico(axes[0])) plt.ylabel(get_dico(values[0])) if style == 'plot': mk = kwargs.pop('marker', None) plot(X, Y, hold=1, color=color, log=log, **kwargs) elif style == 'scatter': #print color, kwargs if 'marker' in kwargs and kwargs['marker'].get_fillstyle( ) == 'none': kwargs['c'] = 'none' kwargs['edgecolor'] = color else: kwargs['c'] = color scatter(X, Y, hold=1, log=log, **kwargs) elif len(axes) == 2: if title: title = '{}: '.format(get_dico(title)) X, Y = idx.T Z = mat if runavg: print 'WARNING: running average not ready for 3d.' from datatools import runavg as average X = average(X, runavg) Y = average(Y, runavg) Z = average(Z, runavg) xnb = len(set(X.ravel())) ynb = len(set(Y.ravel())) shape = (xnb, ynb) if xnb * ynb != X.shape[0]: if style == 'wireframe': print 'Changing style to scatter', values, shape, X.shape color = 'k' style = 'scatter' if style == 'heatmap': #X,Y,Z=xs,ys,H if 'x' in log: plt.xscale('log') if 'y' in log: plt.yscale('log') if show_labels: plt.xlabel(get_dico(axes[0])) plt.ylabel(get_dico(axes[1])) plt.title('{}{}'.format(title, values[0])) plt.pcolor(X.reshape(shape), Y.reshape(shape), Z.reshape(shape)) plt.colorbar() #scatter(X,Y,c=Z,s=300,log='x') elif style == 'wireframe': if newfig: ax = fig.add_subplot(111, projection='3d') else: ax = plt.gca() if show_labels: plt.xlabel(get_dico(axes[0])) plt.ylabel(get_dico(axes[1])) plt.title('{}{}'.format(title, values[0])) ax.set_zlim(bottom=min(Z), top=max(Z)) if zrange: ax.set_zlim(bottom=zrange[0], top=zrange[1]) #X,Y,Z=xs,ys,H try: ax.plot_wireframe( X.reshape(shape), Y.reshape(shape), Z.reshape(shape), ) except Exception as e: print 'COULD NOT PLOT', values, title, X.shape, Y.shape, shape print e else: if newfig: ax = fig.add_subplot(111, projection='3d') else: ax = plt.gca() if show_labels: plt.xlabel(get_dico(axes[0])) plt.ylabel(get_dico(axes[1])) plt.title('{}{}'.format(title, values[0])) ax.set_zlim(bottom=np.min(Z), top=np.max(Z)) if zrange: ax.set_zlim(bottom=zrange[0], top=zrange[1]) if log: ax.set_zscale('log') #X,Y,Z=xs,ys,H if color is None: color = Z ax.scatter(X, Y, Z, c=color)