def write_file(fim, base, suffix, param_type, coarse_grain_type, can):
    """Write FIM to a cache file, skipping the write if an identical FIM is
    already stored there.
    """
    success = True
    overwrite = True
    name = base
    dry_run = False

    if can:
        fname = f'cache/c_elegans/can_{param_type}3/{coarse_grain_type}/{base}_fim{suffix}.p'
    else:
        fname = f'cache/c_elegans/{param_type}3/{coarse_grain_type}/{base}_fim{suffix}.p'

    if dry_run:
        print('Dry run...')
        print(f'Saving {fname}')
    else:
        print(f'Saving {fname}...')
        if os.path.isfile(fname):
            with open(fname, 'rb') as f:
                ofim = pickle.load(f)['fim']
            if np.array_equal(ofim, fim):
                print('File with same FIM already exists.')
            else:
                save_pickle(['fim', 'success', 'name'], fname, overwrite)
                print('Saved.')
        else:
            save_pickle(['fim', 'success', 'name'], fname, overwrite)
            print('Saved.')
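# A minimal reading sketch (the path below is a made-up example): write_file()
# stores a dict keyed by variable name via save_pickle, so a cached FIM can be
# recovered by indexing the 'fim' key, mirroring the check performed above.
def _read_fim_example(fname='cache/c_elegans/pairwise3/bin/example_fim.p'):
    """Illustrative only; not part of the pipeline."""
    import pickle
    with open(fname, 'rb') as f:
        return pickle.load(f)['fim']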
def mft_cutoff(nu=2.5, nForests=30):
    """Simulation of symmetric competition with changing cutoff modulated by
    adjusting the basal metabolic rate coefficient.

    Parameters
    ----------
    nu : float, 2.5
        Fatal fluctuation exponent.
    nForests : int, 30
        Number of independent forests to run.
    """
    basalRange = np.array([.8, .2, .05, .0125, .003125])  # basal metabolism coeff

    # set up
    r0 = 1       # smallest stem radius
    Abar = .5    # death rate coeff
    cg = .3      # growth rate coeff
    rRange = np.linspace(r0, 400, 2000)
    g0 = 500     # sapling introduction rate
    L = 200      # length of boundary
    sampleSize = 5000  # number of samples to take
    dt = .1      # sim time step

    nk = {}  # no. of trees per size class k (pop. no.)
    t = {}   # time
    rk = {}  # radius of size class k

    # run forest sim over basal metabolism coeff
    for basal in basalRange:
        forest = Forest2D(L, g0, rRange,
                          {'root': 1,
                           'canopy': 1,
                           'grow': cg,
                           'death': Abar,
                           'area competition': 1,
                           'basal': basal,
                           'sharing fraction': .5,
                           'resource efficiency': 2,
                           'dep death rate': 1},
                          nu=nu)
        forest.check_dt(dt)

        nk[basal], t[basal], rk[basal] = forest.sample(sampleSize, dt,
                                                       sample_dt=.25,
                                                       n_forests=nForests)

        save_pickle(['nk', 't', 'rk', 'forest', 'r0', 'g0', 'nu', 'Abar',
                     'basalRange', 'cg', 'dt'],
                    f'cache/biomass_scaling_w_compet_{nu=}.p', True)
        print(f'Done with {basal=}.')
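# A minimal sketch (assumes the dict-keyed pickle layout written by
# save_pickle, as relied on in write_file() above): load the mft_cutoff()
# cache and pull out population numbers, times, and radii, each a dict keyed
# by the basal metabolic coefficient.
def _load_mft_cutoff_example(nu=2.5):
    """Illustrative only; nu must match a previously run mft_cutoff(nu)."""
    import pickle
    with open(f'cache/biomass_scaling_w_compet_{nu=}.p', 'rb') as f:
        data = pickle.load(f)
    return data['nk'], data['t'], data['rk']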
def mft_cutoff_plot(nu=2.5):
    """Run example forests used for individual plots.

    Parameters
    ----------
    nu : float, 2.5
        Fatal fluctuation exponent.
    """
    basalRange = np.array([.8, .2, .05, .0125, .003125])

    # set up
    r0 = 1       # smallest stem radius
    Abar = .5    # death rate coeff
    cg = .3      # growth rate coeff
    rRange = np.linspace(r0, 400, 2000)
    g0 = 500     # sapling introduction rate
    L = 200      # length of boundary
    sampleSize = 4000  # number of samples to take
    dt = .1      # sim time step

    def loop_wrapper(basal):
        forest = Forest2D(L, g0, rRange,
                          {'root': 1,
                           'canopy': 1,
                           'grow': cg,
                           'death': Abar,
                           'area competition': 1,
                           'basal': basal,
                           'sharing fraction': .5,
                           'resource efficiency': 2,
                           'dep death rate': 1},
                          nu=nu)
        forest.check_dt(dt)
        forest.sample(sampleSize, dt, sample_dt=.25)
        return forest

    with Pool(basalRange.size) as pool:
        forest = dict(zip(basalRange, pool.map(loop_wrapper, basalRange)))

    save_pickle(['forest'], f'plotting/biomass_scaling_w_compet_{nu=}.p', True)
def WEB_transience():
    """Show the moving cutoff when starting from an empty plot for the simple
    WEB compartment model.
    """
    # set up common parameters
    g0 = 1000
    L = 10
    nSample = 200
    nForests = 40
    cm = .5
    cg = .3
    dt = .005

    # Thin bins to show alignment between simulation and theory.
    # set up
    rRange = np.linspace(1, 500, 5000)

    forest = Forest2D(L, g0, rRange, {'root': 1, 'grow': cg, 'death': cm})
    nk, t, rk = forest.sample(nSample, dt, n_forests=nForests)

    save_pickle(['rRange', 'g0', 'L', 'nSample', 'cm', 'cg', 'dt',
                 't', 'nk', 'rk', 'forest'],
                'cache/linear_model_exponent_transience.p', True)

    # Thick bins to show deviations at small r.
    # set up
    rRange = np.linspace(1, 500, 500)

    forest = Forest2D(L, g0, rRange, {'root': 1, 'grow': cg, 'death': cm})
    nk, t, rk = forest.sample(nSample, dt, n_forests=nForests)

    save_pickle(['rRange', 'g0', 'L', 'nSample', 'cm', 'cg', 'dt',
                 't', 'nk', 'rk', 'forest'],
                'cache/linear_model_exponent_transience_wide_bins.p', True)
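# A minimal sketch (same assumed dict-keyed pickle layout): read back either of
# the two WEB_transience() caches to compare the thin- and wide-bin runs.
def _load_web_transience_example(wide_bins=False):
    """Illustrative only; not part of the pipeline."""
    import pickle
    fname = ('cache/linear_model_exponent_transience_wide_bins.p' if wide_bins
             else 'cache/linear_model_exponent_transience.p')
    with open(fname, 'rb') as f:
        data = pickle.load(f)
    return data['t'], data['nk'], data['rk']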
def hex_packing():
    """Hexagonal packing emerging from strong rate competition.

    The results from this can be used to generate Figure 4D.
    """
    from .nearest_neighbor import pair_correlation

    # this section for showing the spatial distributions
    areaDeathRateRange = np.logspace(-1, 3, 10)  # keys to dicts in xy

    # set up
    r0 = 1
    basal = 0
    rRange = np.linspace(r0, 5, 5)  # growth saturates at max radius of 5
    g0 = 100           # incoming sapling rate
    L = 200            # system length
    burnIn = 1_000     # time steps to ignore
    sampleSize = 1_000
    dt = .2
    coeffs = {'root': 10,
              'death': 0,
              'grow': .3,
              'area competition': 1,
              'basal': basal,
              'sharing fraction': 1,
              'resource efficiency': 2}

    def loop_wrapper(deathRate):
        coeffs['dep death rate'] = deathRate
        forest = Forest2D(L, g0, rRange, coeffs)
        forest.check_dt(dt)

        # burn in and run sim
        forest.sample(2, dt=dt, sample_dt=burnIn)
        nk, t, rk, trees = forest.sample(sampleSize, dt=dt, sample_dt=10,
                                         return_trees=True)

        # get tree coordinates
        xy = [np.vstack([tree.xy for tree in thisTrees])
              for thisTrees in trees]
        print(f'Done with {deathRate=:.2f}.')
        return xy, nk

    with threadpool_limits(user_api='blas', limits=1):
        with Pool(cpu_count() - 1) as pool:
            xy_, nk_ = list(zip(*pool.map(loop_wrapper, areaDeathRateRange)))
            xy = dict(zip(areaDeathRateRange, xy_))
            nk = dict(zip(areaDeathRateRange, nk_))

    save_pickle(['areaDeathRateRange', 'r0', 'basal', 'rRange', 'g0', 'L',
                 'burnIn', 'sampleSize', 'dt', 'coeffs', 'xy', 'nk'],
                'cache/packing_example.p', True)

    # this section for plotting the correlation fcn
    allxy = xy
    p = {}
    bins = np.linspace(0, 5, 40)  # this should be roughly aligned with the stats of the system

    for adr in areaDeathRateRange:
        # fix natural mortality and titrate strength of competition
        xy = allxy[adr]

        # iterate through each random plot
        thisp = []
        r = []
        for xy_ in xy:
            p_, r_ = pair_correlation(np.vstack(xy_), bins, (50, 50, 100, 100))
            thisp.append(p_)
            r.append(r_)
        p[adr] = np.vstack(thisp).mean(0)
    r = r[0]  # x-axis, radial distance

    save_pickle(['p', 'r'], 'plotting/spatial_correlation.p')
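# A minimal plotting sketch (assumes matplotlib is available and the dict-keyed
# pickle layout noted above): draw the pair-correlation curves saved by
# hex_packing(), one per competitive death rate.
def _plot_spatial_correlation_example():
    """Illustrative only; not part of the simulation pipeline."""
    import pickle
    import matplotlib.pyplot as plt

    with open('plotting/spatial_correlation.p', 'rb') as f:
        data = pickle.load(f)
    for adr, p_ in data['p'].items():
        plt.plot(data['r'], p_, label=f'{adr=:.2f}')
    plt.xlabel('radial distance r')
    plt.ylabel('pair correlation')
    plt.legend()
    plt.show()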
def phase_space_scan_abar():
    """Scan across varying growth rate with the natural mortality rate fixed
    to 0, as in Figure 4.
    """
    # for showing the spatial distributions
    cgRange = np.logspace(np.log10(.5), -4, 4)   # growth rate coeff
    areaDeathRateRange = np.logspace(-1, 2, 10)  # comp attrition rate coeff

    # set up
    r0 = 1       # sapling radius
    Abar = 0.    # death rate coeff
    basal = .05  # basal met rate coeff
    rRange = np.linspace(r0, 400, 800)  # growth saturates at radius=400
    g0 = 100
    L = 200
    burnIn = 400
    sampleSize = 100
    dt = .1
    coeffs = {'root': 10,
              'death': Abar,
              'area competition': 1,
              'basal': basal,
              'sharing fraction': 1,
              'resource efficiency': 2}

    # loop over sim parameters
    def loop_cg(cg):
        coeffs['grow'] = cg

        def loop_wrapper(deathRate):
            coeffs['dep death rate'] = deathRate
            forest = Forest2D(L, g0, rRange, coeffs)
            forest.check_dt(dt)

            # burn in and run sim
            if deathRate > 1:
                forest.sample(2, dt=dt, sample_dt=burnIn + 1600)
            else:
                forest.sample(2, dt=dt, sample_dt=burnIn)
            nk, t, rk, trees = forest.sample(sampleSize, dt=dt, sample_dt=10,
                                             return_trees=True)

            # get tree coordinates
            xy = [np.vstack([tree.xy for tree in thisTrees])
                  for thisTrees in trees]
            print(f'Done with {deathRate=:.2f}.')
            return xy, nk

        with threadpool_limits(user_api='blas', limits=1):
            with Pool(cpu_count() - 1) as pool:
                xy_, nk_ = list(zip(*pool.map(loop_wrapper, areaDeathRateRange)))
                xy = dict(zip(areaDeathRateRange, xy_))
                nk = dict(zip(areaDeathRateRange, nk_))
        return xy, nk

    xy = {}  # tree xy coords, indexed by cg and area death rate
    nk = {}  # pop. number (can be used for equilibrium check)

    # loop over growth rates
    for cg in cgRange:
        xy[cg], nk[cg] = loop_cg(cg)

        save_pickle(['cgRange', 'areaDeathRateRange', 'r0', 'cg', 'basal',
                     'rRange', 'g0', 'L', 'burnIn', 'sampleSize', 'dt',
                     'coeffs', 'xy', 'nk'],
                    'cache/spacing_with_cg.p', True)
        print(f'Done with {cg=}.')
        print('')
def phase_space_scan_Abar():
    """Scan across the natural mortality rate Abar as in Figure 4.
    """
    AbarRange = np.linspace(.75, 0, 5)           # death rate coeff
    areaDeathRateRange = np.logspace(-1, 2, 10)  # comp attrition rate coeff

    # set up
    r0 = 1       # sapling basal stem radius
    cg = .3      # growth coeff
    nu = 2.      # fatal fluc exponent
    basal = .05  # basal met coeff
    rRange = np.linspace(r0, 800, 1600)  # growth saturates
    g0 = 100
    L = 200
    burnIn = 400
    sampleSize = 100
    dt = .1
    coeffs = {'root': 10,
              'canopy': 1,
              'grow': cg,
              'area competition': 1,
              'basal': basal,
              'sharing fraction': 1,
              'resource efficiency': 2}

    # loop over natural mortality rate
    def loop_Abar(Abar):
        coeffs['death'] = Abar

        # loop over competitive death rate
        def loop_wrapper(deathRate):
            coeffs['dep death rate'] = deathRate
            forest = Forest2D(L, g0, rRange, coeffs, nu=nu)
            forest.check_dt(dt)

            # burn in and run sim
            if Abar < .38 and deathRate > 1:
                # long time to converge in this regime
                if Abar < .2:
                    forest.sample(burnIn + 1000, dt=dt, sample_dt=sampleSize * dt)
                else:
                    forest.sample(burnIn + 400, dt=dt, sample_dt=sampleSize * dt)
            else:
                forest.sample(burnIn, dt=dt, sample_dt=sampleSize * dt)
            nk, t, rk, trees = forest.sample(sampleSize, dt=dt, sample_dt=10,
                                             return_trees=True)

            # get tree coordinates
            xy = [np.vstack([tree.xy for tree in thisTrees])
                  for thisTrees in trees]
            print(f'Done with {deathRate=:.3f}.')
            return xy, nk

        with threadpool_limits(user_api='blas', limits=1):
            with Pool(cpu_count() - 1) as pool:
                xy_, nk_ = list(zip(*pool.map(loop_wrapper, areaDeathRateRange)))
                xy = dict(zip(areaDeathRateRange, xy_))
                nk = dict(zip(areaDeathRateRange, nk_))
        return xy, nk

    xy = {}  # tree xy coords, indexed by Abar and area death rate
    nk = {}  # pop. number (can be used for equilibrium check)
    for Abar in AbarRange:
        xy[Abar], nk[Abar] = loop_Abar(Abar)

        save_pickle(['AbarRange', 'areaDeathRateRange', 'r0', 'cg', 'nu',
                     'basal', 'rRange', 'g0', 'L', 'burnIn', 'sampleSize',
                     'dt', 'coeffs', 'xy', 'nk'],
                    'cache/phase_space_scan_Abar.p', True)
        print(f'Done with {Abar=}.')
        print('')
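# A minimal sketch (same assumed dict-keyed pickle layout): the phase scan
# stores tree coordinates as nested dicts indexed first by Abar, then by the
# competitive death rate, with one coordinate array per sampled time point.
def _load_phase_scan_Abar_example():
    """Illustrative only; not part of the pipeline."""
    import pickle
    with open('cache/phase_space_scan_Abar.p', 'rb') as f:
        data = pickle.load(f)
    xy = data['xy']  # xy[Abar][deathRate] -> list of tree coordinate arrays
    nk = data['nk']  # population numbers for equilibrium checks
    return xy, nk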
def mft_cutoff_finite_size_checks(nu=2.5, run_smaller=True, run_larger=True):
    """Simulation of symmetric competition with changing cutoff modulated by
    adjusting the basal metabolic rate coefficient. This spans a factor of 16
    in area around the system generated by .mft_cutoff() to test for finite
    size effects.

    Parameters
    ----------
    nu : float, 2.5
        Fatal fluctuation exponent.
    run_smaller : bool, True
        If True, run the half-length (quarter-area) system.
    run_larger : bool, True
        If True, run the double-length (quadruple-area) system.
    """
    basalRange = np.array([.8, .2, .05, .0125, .003125])  # basal met coeff

    # set up
    r0 = 1       # sapling radius
    Abar = .5    # natural mortality coeff
    cg = .3      # growth coeff
    rRange = np.linspace(r0, 400, 2000)  # radii of size classes
    sampleSize = 5000  # no. of samples to take
    dt = .1            # time step size
    nForests = 30      # no. of random forests to sim.

    # smaller system
    if run_smaller:
        g0 = 500 / 4  # incoming sapling rate
        L = 200 / 2   # system length

        nk = {}  # pop no. by size class k
        t = {}   # time
        rk = {}  # radius of size class k

        for basal in basalRange:
            forest = Forest2D(L, g0, rRange,
                              {'root': 1,
                               'canopy': 1,
                               'grow': cg,
                               'death': Abar,
                               'area competition': 1,
                               'basal': basal,
                               'sharing fraction': .5,
                               'resource efficiency': 2,
                               'dep death rate': 1},
                              nu=nu)
            forest.check_dt(dt)

            nk[basal], t[basal], rk[basal] = forest.sample(sampleSize, dt,
                                                           sample_dt=.25,
                                                           n_forests=nForests)

            save_pickle(['nk', 't', 'rk', 'forest', 'r0', 'g0', 'nu', 'Abar',
                         'basalRange', 'cg'],
                        f'cache/biomass_scaling_w_compet_smaller_{nu=}.p', True)
            print(f'Done with {basal=}.')

    # larger system
    if run_larger:
        g0 = 500 * 4
        L = 200 * 2

        nk = {}
        t = {}
        rk = {}

        for basal in basalRange:
            forest = Forest2D(L, g0, rRange,
                              {'root': 1,
                               'canopy': 1,
                               'grow': cg,
                               'death': Abar,
                               'area competition': 1,
                               'basal': basal,
                               'sharing fraction': .5,
                               'resource efficiency': 2,
                               'dep death rate': 1},
                              nu=nu)
            forest.check_dt(dt)

            nk[basal], t[basal], rk[basal] = forest.sample(sampleSize, dt,
                                                           sample_dt=.25,
                                                           n_forests=nForests)

            save_pickle(['nk', 't', 'rk', 'forest', 'r0', 'g0', 'nu', 'Abar',
                         'basalRange', 'cg'],
                        f'cache/biomass_scaling_w_compet_larger_{nu=}.p', True)
            print(f'Done with {basal=}.')