cons = params[2]['c']
table = MakeMatrices(assumptions)

commfunction = pd.read_csv(r"phi_global.csv", header=None)
phii = np.zeros((1, Stot))
for j in range(Stot):
    phii[0, j] = commfunction.iat[j, 0]

plate1 = Community(init_state, dynamics, params, parallel=False)  #on Windows, set parallel to False

c_param = np.array(cons)
for i in range(nofwells):
    plate1.params[i]['c'] = c_param

fig, ax = plt.subplots()
sns.heatmap(cons, vmin=0, square=True, linewidths=.5,
            xticklabels=False, yticklabels=False, cbar=True, ax=ax)
R0 = pd.DataFrame(R0, index=D.index, columns=N0.keys())
init_state = [N0, R0]

#Make parameter list
m = 1 + 0.01*np.random.randn(len(c))
params = [{'w': 1,
           'g': 1,
           'l': 0.8,
           'R0': R0.values[:, k],
           'r': 1.,
           'tau': 1
           } for k in range(len(N0.T))]
for k in range(len(params)):
    params[k]['c'] = c
    params[k]['D'] = D
    params[k]['m'] = m

HMP = Community(init_state, dynamics, params)
HMP.metadata = pd.DataFrame(np.asarray([0, 1, alpha, 0, 1, alpha]),
                            index=N0.T.index, columns=['alpha'])
HMP.metadata['Environment'] = ['Site 1']*3 + ['Site 2']*3
HMP.SteadyState(plot=False, tol=1e-3, verbose=False)

with open(folder + '_'.join(['comm'] + exp.split(' ')) + 'S' + str(HMP_protocol['S']) + '.dat', 'wb') as f:
    pickle.dump([HMP.N, HMP.R, params[0], R0, HMP.metadata], f)
HMP.N.to_csv(folder + '_'.join(['N'] + exp.split(' ')) + 'S' + str(HMP_protocol['S']) + '.csv')
HMP.metadata.to_csv(folder + '_'.join(['m'] + exp.split(' ')) + 'S' + str(HMP_protocol['S']) + '.csv')

################No modularity####################
exp = 'No modularity'
HMP_protocol.update({'SA': 6*800 + 200,  #Number of species in each family
                     'MA': 6*50,  #Number of resources of each type
                     'Sgen': 0,
                     'waste_type': 0,
                     'n_wells': 3
Stot = len(N0)
nwells = len(N0.T)
init_state = [N0, R0]

#Make parameter list
m0 = 0.5 + 0.01*np.random.randn(len(c))
params_EMP = [{'c': c,
               'm': m0 + 10*np.random.rand(),
               'w': 1,
               'D': D,
               'g': 1,
               'l': 0.8,
               'R0': R0.values[:, k],
               'tau': 1
               } for k in range(len(N0.T))]

EMP = Community(init_state, dynamics, params_EMP)
EMP.metadata = pd.DataFrame(np.asarray([np.mean(item['m']) for item in params_EMP]),
                            index=N0.T.index, columns=['m'])

#Integrate to steady state and save
print('Starting integration.')
NTraj, Rtraj = EMP.RunExperiment(np.eye(EMP_protocol['n_wells']), 2, 10,
                                 refresh_resource=False, scale=1e6)
with open(folder + 'EMP.dat', 'wb') as f:
    pickle.dump([EMP.N, EMP.R, params_EMP, EMP.metadata], f)
print('Finished stage 1.')
NTraj, Rtraj = EMP.RunExperiment(np.eye(EMP_protocol['n_wells']),
def RunCommunity(assumptions, M, eps=1e-5, trials=1, postprocess=True,
                 run_number=0, cutoff=1e-5, max_iter=1):
    fun = np.inf
    k = 0
    assumptions['n_wells'] = trials
    assumptions['waste_type'] = 0
    assumptions['MA'] = M
    assumptions['Sgen'] = 0
    S = int(round(M/assumptions['gamma']))
    Stot = S*2
    assumptions['S'] = S
    assumptions['SA'] = Stot
    if assumptions['sampling'] == 'Binary':
        p_c = 1/((M*assumptions['sigc']**2/assumptions['muc']**2) + 1)
        assumptions['c1'] = assumptions['muc']/(M*p_c)
    assumptions['omega'] = 1/assumptions['tau']
    assumptions['sigw'] = 0
    assumptions['sigD'] = np.sqrt((assumptions['sparsity']/(assumptions['sparsity'] + 1))*((M - 1)/M))
    assumptions['mug'] = 1
    assumptions['sigg'] = 0

    if assumptions['single']:
        #Get closed-form solution for large <N> limit of single
        args_closed = np.asarray([np.nan, np.nan, np.nan])
        eps_d = 1/assumptions['l'] - 1
        y0 = assumptions['muc']**2*eps_d/(assumptions['gamma']*assumptions['sigc']**2)
        out = minimize_scalar(lambda Delta: (y(Delta) - y0)**2, bracket=[-5, 5])
        #r2 = lambda Delta: assumptions['l']*w0(Delta)/assumptions['gamma']
        #y0 = lambda Delta: (assumptions['muc']**2/(assumptions['gamma']*assumptions['sigc']**2))*((1/assumptions['l'])-(1-r2(Delta)))*(1+4*r2(Delta))**(3/2)
        #y0 = lambda Delta: (assumptions['muc']**2/(assumptions['gamma']*assumptions['sigc']**2))*(r2(Delta)*assumptions['sigD']**2/(assumptions['l']-assumptions['sigD']**2) + ((1/assumptions['l'])-(1-r2(Delta)))*(1+4*r2(Delta))**(3/2))/(1-r2(Delta)*assumptions['sigD']**2/(1-r2(Delta)*assumptions['sigD']**2/(assumptions['l']-assumptions['sigD']**2)))
        #out = minimize_scalar(lambda Delta:(y(Delta)-y0(Delta))**2,bracket=[-5,5])
        DelN_closed = out.x
        r2 = assumptions['l']*w0(DelN_closed)/assumptions['gamma']
        R0 = assumptions['m']/((1 - assumptions['l'])*assumptions['mug']*assumptions['muc'])
        r3 = assumptions['gamma']**2*assumptions['sigc']**2*r2*(1 + eps_d)*DelN_closed/(assumptions['muc']**2*assumptions['l']*w0(DelN_closed)*w1(DelN_closed))
        R_closed = R0/(1 - r3)
        qR_closed = (R_closed**2/assumptions['l'])*((r2*(1 - 2*r2 + eps_d - 6*r2*eps_d)*assumptions['sigc']*assumptions['sigD']*assumptions['gamma']**2/(assumptions['l']*assumptions['muc']*w0(DelN_closed)*w1(DelN_closed)))**2 + 1) - assumptions['sigm']**2*assumptions['sigD']**2/(assumptions['l']*(1 - assumptions['l'])**2*assumptions['sigc']**2*assumptions['mug']**2)
        args_closed[0] = R_closed
        args_closed[2] = qR_closed
        assumptions['kappaE_M'] = assumptions['R0']/M
        args_closed[1] = (assumptions['kappaE_M'] - assumptions['omega']*R_closed)*assumptions['gamma']/assumptions['m']
    else:
        assumptions['kappa'] = np.mean(assumptions['R0']/assumptions['tau'])

    while fun > eps and k < max_iter:
        params = usertools.MakeParams(assumptions)
        params['R0'] = assumptions['R0']
        if assumptions['single']:
            params['R0'] = np.zeros(M)
            params['R0'][0] = assumptions['R0']
        N0, R0 = usertools.MakeInitialState(assumptions)
        TestPlate = Community([N0, R0], [dNdt, dRdt], params)
        TestPlate.SteadyState()

        #Find final states
        TestPlate.N[TestPlate.N < cutoff] = 0
        #RE2_M0 = TestPlate.R.loc[('T0','R0')].mean()**2/M
        Rmean = TestPlate.R.drop(('T0', 'R0')).mean(axis=0)
        R2mean = (TestPlate.R.drop(('T0', 'R0'))**2).mean(axis=0)
        Nmean = (Stot*1./S)*TestPlate.N.mean(axis=0)

        #Compute moments for feeding in to cavity calculation
        args0 = np.asarray([np.mean(Rmean), np.mean(Nmean), np.mean(R2mean)]) + 1e-10
        args0_err = np.asarray([np.std(Rmean), np.std(Nmean), np.std(R2mean)])

        if assumptions['single']:
            bounds = [(np.log(args0[0]) - 1, np.log(args0[0]) + 1), (-5, 5)]
            out = opt.minimize(cost_function_single, [np.log(args0[0]), 0],
                               args=(assumptions,), bounds=bounds, tol=1e-8)
            R_single = np.exp(out.x[0])
            DelN_cav = out.x[1]
            N_single = (assumptions['kappaE_M'] - assumptions['omega']*R_single)*assumptions['gamma']/assumptions['m']
            eps_d = (assumptions['omega'] + assumptions['muc']*N_single/assumptions['gamma'])/(assumptions['muc']*assumptions['l']*N_single/assumptions['gamma']) - 1
            omega_eff = assumptions['omega'] + assumptions['muc']*N_single/assumptions['gamma']
            kappa_eff = assumptions['l']*assumptions['muc']*N_single*R_single/assumptions['gamma']
            r2 = kappa_eff*w0(DelN_cav)/(assumptions['gamma']*omega_eff*R_single)
            qR_single = ((assumptions['omega'] + assumptions['muc']*N_single/assumptions['gamma'])/(assumptions['muc']*N_single*assumptions['l']/assumptions['gamma']))*(R_single**2*((r2*(1 - 2*r2 + eps_d - 6*r2*eps_d)*assumptions['sigc']*assumptions['sigD']*assumptions['gamma']**2/(assumptions['l']*assumptions['muc']*w0(DelN_cav)*w1(DelN_cav)))**2 + 1) - assumptions['sigm']**2*assumptions['sigD']**2/((1 - assumptions['l'])**2*assumptions['sigc']**2*assumptions['mug']**2))
            qR_adj = (r2*(1 - 2*r2 + eps_d - 6*r2*eps_d)*assumptions['sigc']*assumptions['gamma']**2/(assumptions['l']*assumptions['muc']*w0(DelN_cav)*w1(DelN_cav)))**2*R_single**2 - assumptions['sigm']**2/((1 - assumptions['l'])**2*assumptions['sigc']**2*assumptions['mug']**2)
            args_cav = [R_single, N_single, qR_single]
        else:
            bounds = [(np.log(args_closed[k]) - 1, np.log(args_closed[k]) + 1) for k in range(3)]
            out = opt.minimize(cost_function, np.log(args_closed),
                               args=(assumptions,), bounds=bounds, tol=1e-8)
            #ranges = [(np.log(args_closed[k])-1,np.log(args_closed[k])+1) for k in range(4)]
            #out = opt.brute(cost_function,ranges,Ns=100,args=(assumptions,),workers=-1)
            args_cav = np.exp(out.x)
            DelN_cav = DelN(args_cav, assumptions)
            qR_adj = args_cav[2]
            r2 = kappa_eff*w0(DelN_cav)/(assumptions['gamma']*omega_eff*args_cav[0])
            eps_d = (assumptions['omega'] + assumptions['muc']*args_cav[1]/assumptions['gamma'])/(assumptions['muc']*assumptions['l']*args_cav[1]/assumptions['gamma']) - 1
        fun = out.fun  #/np.sum(args0**2)
        k += 1

    if fun > eps:
        args_cav = [np.nan, np.nan, np.nan]
        fun = np.nan

    N = args_cav[1]
    qN = N**2*y(DelN_cav)
    sigd = np.sqrt(assumptions['sigw']**2 + assumptions['sigc']**2*qN)
    sigp = assumptions['l']*assumptions['sigD']*np.sqrt(qR_adj*(assumptions['sigc']**2*qN + assumptions['muc']**2*N**2)/assumptions['gamma'])
    sigN = np.sqrt(assumptions['sigm']**2 + ((1 - assumptions['l'])*assumptions['sigc']*assumptions['mug'])**2*qR_adj)
    chi = r2*(1 - 2*r2 + eps_d - 6*r2*eps_d)*assumptions['gamma']**2/(assumptions['l']*assumptions['muc']*N*w0(DelN_cav))
    fN = ((1 - assumptions['l'])*assumptions['mug']*chi*assumptions['sigc']**2*args_cav[0])**(-1)

    if postprocess:
        results_num = {'SphiN': np.sum(TestPlate.N.values.reshape(-1) > cutoff)*1./trials,
                       'M<R>': M*args0[0],
                       'S<N>': S*args0[1],
                       'MqR': M*args0[2]}
        results_cav = {'SphiN': S*w0(DelN_cav),
                       'M<R>': M*args_cav[0],
                       'S<N>': S*args_cav[1],
                       'MqR': M*args_cav[2],
                       'sigd': sigd,
                       'sigp': sigp,
                       'sigN': sigN,
                       'eps_d': eps_d,
                       'r2': r2,
                       'chi': chi,
                       'fN': fN}
        if assumptions['single']:
            results_closed = {'SphiN': S*w0(DelN_closed),
                              'M<R>': M*args_closed[0],
                              'S<N>': S*args_closed[1],
                              'MqR': M*args_closed[2]}
        else:
            results_closed = np.nan
        return results_num, results_cav, results_closed, out, args0, assumptions, TestPlate
    else:
        data = pd.DataFrame([args_cav], columns=['<R>', '<N>', '<R^2>'], index=[run_number])
        data['fun'] = fun
        for item in assumptions.keys():
            data[item] = assumptions[item]
        data['S'] = S
        data['M'] = M
        data['phiN'] = w0(DelN_cav)
        data['<N^2>'] = qN
        data['sigd'] = sigd
        data['sigp'] = sigp
        data['sigN'] = sigN
        data['fN'] = fN
        data['chi'] = chi
        data['eps_d'] = eps_d
        data['r2'] = r2

        data_sim = pd.DataFrame([args0], columns=['<R>', '<N>', '<R^2>'], index=[run_number])
        data_sim['phiN'] = np.mean((TestPlate.N > cutoff).sum(axis=0))/S
        data_sim['<N^2>'] = np.mean((Stot*1./S)*(TestPlate.N**2).mean(axis=0))
        err_sim = pd.DataFrame([args0_err], columns=['<R>', '<N>', '<R^2>'], index=[run_number])
        err_sim['phiN'] = np.std((TestPlate.N > cutoff).sum(axis=0))/S
        err_sim['<N^2>'] = np.std((Stot*1./S)*(TestPlate.N**2).mean(axis=0))
        data = data.join(data_sim, rsuffix='_sim').join(err_sim, rsuffix='_sim_err')

        if assumptions['single']:
            data_closed = pd.DataFrame([args_closed], columns=['<R>', '<N>', '<R^2>'], index=[run_number])
            data_closed['phiN'] = w0(DelN_closed)
            data_closed['<N^2>'] = y(DelN_closed)*args_closed[1]**2
            data = data.join(data_closed, rsuffix='_closed')
        return data
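#----------------------------------------------------------------------
#Hypothetical usage sketch (not part of the original scripts): one way to
#drive RunCommunity over a range of resource numbers M. Only the keys set
#below are ones RunCommunity actually reads ('sampling', 'muc', 'sigc',
#'m', 'sigm', 'l', 'tau', 'gamma', 'sparsity', 'R0', 'single'); the
#numerical values and the M sweep are placeholders, and the snippet
#assumes the same namespace as the function above (np, pd, opt,
#usertools, Community, dNdt, dRdt, w0, w1, y, DelN, cost_function,
#cost_function_single).
#----------------------------------------------------------------------
#Start from the package defaults if the installed community-simulator
#exposes usertools.a_default; otherwise the full assumptions dict must be
#supplied by hand.
base_assumptions = usertools.a_default.copy()
base_assumptions.update({'sampling': 'Binary',  #consumer matrix sampling scheme
                         'muc': 10,             #mean total consumption capacity
                         'sigc': 3,             #std of consumption rates
                         'm': 1,                #mean maintenance cost
                         'sigm': 0.1,           #std of maintenance costs
                         'l': 0.8,              #leakage fraction
                         'tau': 1,              #resource dilution timescale
                         'gamma': 1,            #resource-to-species ratio M/S
                         'sparsity': 0.3,       #sparsity of the metabolic matrix
                         'R0': 200,             #external resource supply
                         'single': True})       #supply only the first resource

results = []
for M in [32, 64, 128]:
    #RunCommunity mutates the assumptions dict in place, so pass a fresh copy per run
    results.append(RunCommunity(base_assumptions.copy(), M, trials=10,
                                postprocess=False, run_number=M))
summary = pd.concat(results)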
init_state = [N0, R0]

#Make parameter list
m = 1 + 0.01*np.random.randn(len(c))
params = [{'w': 1,
           'g': 1,
           'l': 0.8,
           'R0': R0.values[:, k],
           'r': 1.,
           'tau': 1
           } for k in range(len(N0.T))]
for k in range(len(params)):
    params[k]['c'] = c
    params[k]['D'] = D
    params[k]['m'] = m

HMP = Community(init_state, dynamics, params)
HMP.metadata = pd.DataFrame(['Site 1']*n_samples + ['Site 2']*n_samples + ['Site 3']*n_samples,
                            index=N0.T.index, columns=['Environment'])
HMP.metadata['alpha'] = np.asarray(list(alpha)*3)
HMP.SteadyState(plot=False, tol=1e-3, verbose=False)

with open(filename('comm', exp, HMP_protocol['S']), 'wb') as f:
    pickle.dump([HMP.N, HMP.R, params[0], R0, HMP.metadata], f)
HMP.N.to_csv(filename('N', exp, HMP_protocol['S']))
HMP.metadata.to_csv(filename('m', exp, HMP_protocol['S']))

#############All external resources####################
exp = 'Complex environments'
R0 = np.zeros(np.shape(R0))
for k in range(3):
m0 = 0.5 + 0.01*np.random.randn(len(c))
N = {}
metadata = {}

### Crossfeeding, one external resource
exp = 'Simple environment'
params_EMP = [{'c': c,
               'm': m0 + 10*np.random.rand(),
               'w': 1,
               'D': D,
               'g': 1,
               'l': 0.8,
               'R0': R0.values[:, k],
               'tau': 1
               } for k in range(len(N0.T))]

EMP = Community(init_state, dynamics, params_EMP)
metadata = pd.DataFrame(np.asarray([np.mean(item['m']) for item in params_EMP]),
                        index=N0.T.index, columns=['m'])
EMP.SteadyState()
EMP.N.to_csv(filename('N', exp, EMP_protocol['S']))
metadata.to_csv(filename('m', exp, EMP_protocol['S']))
with open(filename('comm', exp, EMP_protocol['S']), 'wb') as f:
    pickle.dump([EMP.N, EMP.R, params_EMP[0], R0, metadata], f)

### Crossfeeding, all external resources
exp = 'Complex environment'
params_EMP = [{'c': c,
               'm': m0 + 10*np.random.rand(),
               'w': 1,
               'D': D,
               'g': 1,
               'l': 0.8,
init_state = [N0, R0]

#Make parameter list
m = 1 + 0.01*np.random.randn(len(c))
params = [{'w': 1,
           'g': 1,
           'l': 0.8,
           'R0': R0.values[:, k],
           'r': 1.,
           'tau': 1
           } for k in range(len(N0.T))]
for k in range(len(params)):
    params[k]['c'] = c
    params[k]['D'] = D
    params[k]['m'] = m

HMP = Community(init_state, dynamics, params)
HMP.metadata = pd.DataFrame(['Env. 1']*n_samples + ['Env. 2']*n_samples + ['Env. 3']*n_samples,
                            index=N0.T.index, columns=['Environment'])

###############GET STEADY STATE AND SAVE#####################
HMP.SteadyState(plot=False, tol=1e-3, verbose=False)
with open(folder + 'HMP_env_family.dat', 'wb') as f:
    pickle.dump([HMP.N, HMP.R, params[0], R0, HMP.metadata], f)

###############REDO WITHOUT FAMILY STRUCTURE#####################
HMP_protocol.update({'SA': 6*800 + 200,  #Number of species in each family
                     'MA': 6*50,  #Number of resources of each type
                     'Sgen': 0,  #Number of generalist species