def update_typical_vals(networks, int_times, rtol=1e-9, fraction=1.0,
                        cutoff=1e-14):
    """
    Update the typical var values for a group of networks.

    Find the maximum of each variable over the integrations. In each
    network, the typical value is set to fraction of that maximum. If that
    maximum value is less than cutoff, the typical value is set to 1.

    networks    List of networks to work with
    int_times   List of corresponding integration endpoints
                (ie. [(0, 100), (0, 50)])
    fraction    Relative size of typical value, compared to max value over
                the integrations.
    rtol        Relative tolerance for the integrations.
    cutoff      Values below this are considered to be zero
    """
    max_vals = {}
    for net, times in zip(networks, int_times):
        traj = Dynamics.integrate(net, times, rtol=rtol, fill_traj=True)
        for var_id in list(net.variables.keys()):
            curr_max = max(traj.get_var_traj(var_id))
            max_vals[var_id] = max(curr_max, max_vals.get(var_id, 0))

    for var_id, val in list(max_vals.items()):
        for net in networks:
            if var_id in net.variables:
                if val > cutoff:
                    net.set_var_typical_val(var_id, val * fraction)
                else:
                    net.set_var_typical_val(var_id, 1.0)
    return max_vals

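# Example usage, a minimal sketch: `net1` and `net2` are hypothetical
# SloppyCell Network objects built elsewhere. Typical values are set to
# half the per-variable maxima observed over the two integration windows.
#
#     max_vals = update_typical_vals([net1, net2], [(0, 100), (0, 50)],
#                                    fraction=0.5)
#     print(net1.get_var_typical_val('A'))
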
def get_s_integration(net, p=None, Tmin=None, Tmax=None, k=None, tol=None,
                      to_ser=False):
    """Get the steady state of the network by forward integration.

    Integrate over successively longer windows (to Tmin, Tmin*k,
    Tmin*k**2, ..., up to Tmax) until the residual |dx/dt| at the endpoint
    falls below tol; raise an exception if Tmax is exceeded first.
    Tmin, Tmax, k and tol default to the module-level TMIN, TMAX, K and
    TOL_SS. If to_ser is True, return the steady state as a Series indexed
    by net.xids.
    """
    if p is not None:
        net.update_optimizable_vars(p)
    if Tmin is None:
        Tmin = TMIN
    if Tmax is None:
        Tmax = TMAX
    if k is None:
        k = K
    if tol is None:
        tol = TOL_SS

    nsp = len(net.dynamicVars)
    tmin, tmax = 0, Tmin
    x0 = net.x0.copy()
    constants = net.constantVarValues

    while tmax <= Tmax:
        # yp0 helps integration stability; compute consistent initial
        # derivatives at the current state x0 (not net.x0, which is only
        # correct on the first pass)
        yp0 = Dynamics.find_ics(x0, x0, tmin,
                                net._dynamic_var_algebraic,
                                [1e-6] * nsp, [1e-6] * nsp,
                                constants, net)[1]
        # using daskr to save computational overhead
        out = daskr.daeint(res=net.res_function, t=[tmin, tmax],
                           y0=x0, yp0=yp0,
                           atol=[1e-6] * nsp, rtol=[1e-6] * nsp,
                           intermediate_output=False, rpar=constants,
                           max_steps=100000.0, max_timepoints=100000.0,
                           jac=net.ddaskr_jac)
        xt = out[0][-1]
        dxdt = net.res_function(tmax, xt, [0] * nsp, constants)
        if np.max(np.abs(dxdt)) < tol:
            net.updateVariablesFromDynamicVars(xt, tmax)
            net.t = tmax
            if to_ser:
                return butil.Series(xt, net.xids)
            else:
                return xt
        else:
            tmin, tmax = tmax, tmax * k
            x0 = xt
    raise Exception("Cannot reach steady state for p=%s" % p)

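# Example usage, a minimal sketch (`net` is a hypothetical Network whose
# ODEs have been compiled; module defaults are used for anything omitted):
#
#     s = get_s_integration(net, Tmin=10, Tmax=1e6, k=10, tol=1e-10,
#                           to_ser=True)
#     print(s)  # steady-state values indexed by net.xids
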
def discrete_data(net, params, pts, interval, vars=None, random=False,
                  uncert_func=typ_val_uncert(0.1, 1e-14)):
    """
    Return a set of data points for the given network generated at the
    given parameters.

    net         Network to generate data for
    params      Parameters for this evaluation of the network
    pts         Number of data points to output
    interval    Integration interval
    vars        Variables to output data for, defaults to all species
                in net
    random      If False, data points are distributed evenly over interval.
                If True, they are spread randomly and uniformly over each
                variable.
    uncert_func Function that takes in a trajectory and a variable id and
                returns what uncertainty should be assumed for that
                variable, either as a scalar or a list the same length as
                the trajectory.
    """
    # Default for vars
    if vars is None:
        vars = list(net.species.keys())

    # Assign observed times to each variable
    var_times = {}
    for var in vars:
        if random:
            var_times[var] = (np.random.rand(pts)
                              * (interval[1] - interval[0]) + interval[0])
        else:
            var_times[var] = np.linspace(interval[0], interval[1], pts)

    # Create a sorted list of the unique times in the var_times dict
    int_times = set(np.ravel(list(var_times.values())))
    int_times.add(0)
    int_times = list(int_times)
    int_times.sort()

    # Get the trajectory
    traj = Dynamics.integrate(net, int_times, params=params,
                              fill_traj=False)

    # Build up our data dictionary
    data = {}
    for var, times in list(var_times.items()):
        var_data = {}
        data[var] = var_data

        # Calculate our uncertainties
        var_uncerts = uncert_func(traj, var)
        for time in times:
            val = traj.get_var_val(var, time)
            if np.isscalar(var_uncerts):
                uncert = var_uncerts
            else:
                index = traj._get_time_index(time)
                uncert = var_uncerts[index]
            var_data[time] = (val, uncert)

    return data

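# Example usage, a minimal sketch (`net` and `params` assumed to exist):
# generate 5 evenly spaced data points for species 'A' and 'B' over
# t in [0, 10] and print them with their assumed uncertainties.
#
#     data = discrete_data(net, params, pts=5, interval=(0, 10),
#                          vars=['A', 'B'])
#     for t, (val, sigma) in sorted(data['A'].items()):
#         print(t, val, sigma)
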
def integrate_sensitivity(net, times, p=None, x0=None, rtol=None,
                          varids=None):
    """Integrate the sensitivity system of the network.

    :param net: network to integrate
    :param times: list or tuple; a tuple requests intermediate output
        between its endpoints
    :param p: parameter values; if given, net.p is updated first
    :param x0: initial condition; defaults to net.x0
    :param rtol: relative tolerance; defaults to the module-level RTOL
    :param varids: variable ids to keep in the returned trajectory; plain
        state-variable ids are expanded to (state, parameter) pairs
    """
    if isinstance(times, tuple):
        intermediate_output = True
    else:
        intermediate_output = False

    t0 = times[0]
    if t0 == 0:
        _times = times
    else:
        _times = [0] + list(times)

    if p is not None:
        net.p = p
    if x0 is None:
        # note: x0 is currently not passed through to the integrator,
        # which always starts from net.x0
        x0 = net.x0
    if rtol is None:
        rtol = RTOL

    if not hasattr(net, 'res_function'):
        net.compile()

    out = Dynamics.integrate_sensitivity(net, _times,
                                         rtol=[rtol] * net.xdim,
                                         fill_traj=intermediate_output)

    if varids is not None:
        if set(varids) < set(net.varids):
            # expand state-variable ids to (state, parameter) pairs
            varids = list(itertools.product(varids, net.pids))
        out = out.copy_subset(varids)

    traj = out.values
    times = out.timepoints
    varids = out.key_column.keys()

    if t0 != 0:
        idx_t0 = list(times).index(t0)
        times = times[idx_t0:]
        traj = traj[idx_t0:]

    traj = Trajectory(traj, index=pd.Index(times, name='time'),
                      columns=varids)
    return traj

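# Example usage, a minimal sketch: request the sensitivities of species
# 'A' with respect to all parameters at t = 1, 5 and 10 (`net` assumed
# in scope).
#
#     straj = integrate_sensitivity(net, [1, 5, 10], varids=['A'])
#     # straj columns are ('A', pid) pairs, one per parameter in net.pids
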
def make_sens_traj(calcobject, params, times, senstrajfilename):
    """
    Make the sensitivity trajectory for the calculation.

    calcobject: same as in setup(...) above
    params: parameters as a KeyedList; the sensitivity traj is calculated
        at these parameters (should be same as in paramfile in
        setup(...) above)
    times: the timepoints in the sensitivity trajectory (1-d array)
    senstrajfilename: the file to save the sensitivity trajectory to

    Note that if times is very finely spaced, the sensitivity trajectory
    will need a lot of storage space.
    """
    senstraj = Dynamics.integrate_sensitivity(calcobject, times, params,
                                              1.0e-6)
    save(senstraj, senstrajfilename)

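# Example usage, a minimal sketch (`calc` and `params` as in setup(...)
# above; the output filename is hypothetical):
#
#     times = np.linspace(0, 100, 201)
#     make_sens_traj(calc, params, times, 'sens_traj.bp')
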
def few_ensemble_trajs(net, times, elements):
    import SloppyCell.ReactionNetworks.Dynamics as Dynamics
    traj_set = []
    for params in elements:
        try:
            traj = Dynamics.integrate(net, times, params=params,
                                      fill_traj=False)
            # keep only trajectories that integrated cleanly (no NaNs)
            if not scipy.any(scipy.isnan(traj.values)):
                traj_set.append(traj)
        except Utility.SloppyCellException:
            logger.warning('Exception in network integration on node %i.'
                           % my_rank)
    return traj_set

def get_s(net, p=None, method=None, Tmin=None, Tmax=None, k=None, x0=None,
          tol=None, to_ser=False, **kwargs_rootfinding):
    """Get the steady state of the network.

    Input:
        method: 'integration', 'rootfinding', or 'mixed'; defaults to the
            module-level METHOD
        Tmin, Tmax, k: integration-window parameters (see
            get_s_integration)
        x0: initial guess for rootfinding
    """
    if p is not None:
        net.update_optimizable_vars(p)
    if method is None:
        method = METHOD
    if Tmin is None:
        Tmin = TMIN

    if method == 'integration':
        return get_s_integration(net, Tmin=Tmin, Tmax=Tmax, k=k, tol=tol,
                                 to_ser=to_ser)
    elif method == 'rootfinding':
        return get_s_rootfinding(net, x0=x0, tol=tol, to_ser=to_ser,
                                 **kwargs_rootfinding)
    elif method == 'mixed':
        # integrate to t=Tmin first, then use the endpoint as the initial
        # guess for rootfinding
        nsp = len(net.dynamicVars)
        constants = net.constantVarValues
        yp0 = Dynamics.find_ics(net.x0, net.x0, 0,
                                net._dynamic_var_algebraic,
                                [1e-6] * nsp, [1e-6] * nsp,
                                constants, net)[1]
        out = daskr.daeint(res=net.res_function, t=[0, Tmin],
                           y0=net.x0.copy(), yp0=yp0,
                           atol=[1e-6] * nsp, rtol=[1e-6] * nsp,
                           intermediate_output=False, rpar=constants,
                           max_steps=100000.0, max_timepoints=100000.0,
                           jac=net.ddaskr_jac)
        xt = out[0][-1]
        return get_s_rootfinding(net, x0=xt, tol=tol, to_ser=to_ser,
                                 **kwargs_rootfinding)
    else:
        raise ValueError("Unrecognized value for method: %s" % method)

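# Example usage, a minimal sketch: the 'mixed' method integrates to
# t=Tmin and then polishes the endpoint by rootfinding (`net` assumed
# in scope).
#
#     s = get_s(net, method='mixed', Tmin=100, to_ser=True)
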
def integrate(net, times, p=None, x0=None, atol=None, rtol=None,
              varids=None):
    """A wrapper of SloppyCell.ReactionNetworks.Dynamics.integrate that
    fixes two previously nonintuitive behaviors:

    1) x(t0) falsely took the value of x(t1), where t0 and t1 are the
       first and second time points in the returned trajectory;
    2) when the argument times did not start from 0, the integration went
       from 0 to tmax-tmin.

    :param net: network to integrate
    :param times: list or tuple; a tuple requests intermediate output
        between its endpoints
    :param p: parameter values; if given, net.p is updated first
    :param x0: initial condition; defaults to net.x0
    :param atol: absolute tolerance; defaults to the module-level ATOL
    :param rtol: relative tolerance; defaults to the module-level RTOL
    :param varids: variable ids to keep in the returned trajectory
    """
    if isinstance(times, tuple):
        intermediate_output = True
    else:
        intermediate_output = False

    t0 = times[0]
    if t0 == 0:
        _times = times
    else:
        _times = [0] + list(times)

    if p is not None:
        net.p = p
    if x0 is None:
        x0 = net.x0
    if atol is None:
        atol = ATOL
    if rtol is None:
        rtol = RTOL

    if not hasattr(net, 'res_function'):
        net.compile()

    # An earlier implementation called SloppyCell.daskr directly:
    #
    #     out = daskr.daeint(res=net.res_function, t=_times,
    #                        y0=x0.copy(), yp0=[0]*net.xdim,
    #                        atol=[atol]*net.xdim, rtol=[rtol]*net.xdim,
    #                        intermediate_output=intermediate_output,
    #                        rpar=net.constants)
    #     traj = out[0]
    #     traj[0] = x0
    #     times = out[1]
    #
    # Use SloppyCell.ReactionNetworks.Dynamics.integrate for now, as it
    # wraps around SloppyCell.daskr and is somehow more stable than daskr
    # itself but not much slower. It automatically updates net.x, hence
    # there is no need to manually update x.
    out = Dynamics.integrate(net, _times, atol=[atol] * net.xdim,
                             rtol=[rtol] * net.xdim,
                             fill_traj=intermediate_output)

    if varids is not None:
        out = out.copy_subset(varids)

    traj = out.values
    times = out.timepoints
    varids = out.key_column.keys()

    if t0 != 0:
        idx_t0 = list(times).index(t0)
        times = times[idx_t0:]
        traj = traj[idx_t0:]

    return Trajectory(traj, index=pd.Index(times, name='time'),
                      columns=varids)

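# Example usage, a minimal sketch (`net` assumed in scope). Passing a
# tuple asks for intermediate output; a list returns only the requested
# timepoints:
#
#     traj = integrate(net, [1, 2, 5, 10], varids=['A', 'B'])
#     traj_fine = integrate(net, (0, 10))  # fill_traj=True under the hood
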