def update_typical_vals(networks, int_times, rtol = 1e-9, fraction=1.0, cutoff=1e-14):
    """Update the typical variable values for a group of networks.

    Each variable's maximum over all the integrations is found; in every
    network that carries the variable, the typical value is then set to
    `fraction` of that maximum, or to 1.0 when the maximum does not exceed
    `cutoff`.

    networks   List of networks to work with
    int_times  List of corresponding integration endpoints
               (e.g. [(0, 100), (0, 50)])
    rtol       Relative tolerance for the integrations.
    fraction   Relative size of typical value, compared to the max value
               over the integrations.
    cutoff     Values below this are considered to be zero.

    Returns the dictionary mapping variable id -> maximum observed value.
    """
    peak_vals = {}
    # Pass 1: integrate every network and record each variable's peak.
    for network, endpoints in zip(networks, int_times):
        trajectory = Dynamics.integrate(network, endpoints, rtol=rtol,
                                        fill_traj=True)
        for vid in list(network.variables.keys()):
            local_peak = max(trajectory.get_var_traj(vid))
            peak_vals[vid] = max(local_peak, peak_vals.get(vid, 0))
    # Pass 2: push the (scaled) peaks back into every network that has
    # the variable.
    for vid, peak in list(peak_vals.items()):
        for network in networks:
            if vid not in network.variables:
                continue
            if peak > cutoff:
                network.set_var_typical_val(vid, peak * fraction)
            else:
                # Peak is effectively zero; fall back to a neutral scale.
                network.set_var_typical_val(vid, 1.0)
    return peak_vals
def discrete_data(net, params, pts, interval, vars=None, random=False, uncert_func=typ_val_uncert(0.1, 1e-14)):
    """Return a set of data points for the given network generated at the
    given parameters.

    net          Network to generate data for
    params       Parameters for this evaluation of the network
    pts          Number of data points to output per variable
    interval     Integration interval (t_min, t_max)
    vars         Variables to output data for; defaults to all species in net
    random       If False, data points are distributed evenly over interval.
                 If True, they are spread randomly and uniformly over each
                 variable.
    uncert_func  Function taking a trajectory and a variable id and returning
                 the uncertainty to assume for that variable, either as a
                 scalar or as a list the same length as the trajectory.

    Returns {var_id: {time: (value, uncertainty)}}.
    """
    if vars is None:
        vars = list(net.species.keys())

    # Choose the observation times for each variable.
    sample_times = {}
    for vid in vars:
        if random:
            span = interval[1] - interval[0]
            sample_times[vid] = np.random.rand(pts) * span + interval[0]
        else:
            sample_times[vid] = np.linspace(interval[0], interval[1], pts)

    # Integrate once over the sorted union of all requested times.
    # t=0 is always included so the integration starts at the origin.
    all_times = set(np.ravel(list(sample_times.values())))
    all_times.add(0)
    traj = Dynamics.integrate(net, sorted(all_times), params=params,
                              fill_traj=False)

    # Assemble the data dictionary: {var: {time: (value, uncertainty)}}.
    data = {}
    for vid, times in list(sample_times.items()):
        uncerts = uncert_func(traj, vid)
        scalar_uncert = np.isscalar(uncerts)
        series = {}
        for t in times:
            if scalar_uncert:
                sigma = uncerts
            else:
                # Per-point uncertainty: look up the trajectory index.
                sigma = uncerts[traj._get_time_index(t)]
            series[t] = (traj.get_var_val(vid, t), sigma)
        data[vid] = series
    return data
def few_ensemble_trajs(net, times, elements):
    """Integrate `net` at each parameter set in `elements`.

    net       Network to integrate
    times     Time points to integrate over
    elements  Iterable of parameter sets

    Returns the list of trajectories that integrated successfully and
    contain no NaN values. Failed integrations are logged and skipped.
    """
    import SloppyCell.ReactionNetworks.Dynamics as Dynamics
    traj_set = []
    for params in elements:
        try:
            traj = Dynamics.integrate(net, times, params=params,
                                      fill_traj=False)
            # Discard trajectories that went numerically bad.
            # (np.any/np.isnan replace scipy.any/scipy.isnan, which were
            # removed from the top-level scipy namespace in SciPy 1.12.)
            if not np.any(np.isnan(traj.values)):
                traj_set.append(traj)
        except Utility.SloppyCellException:
            # logger.warn is a deprecated alias of logger.warning; use the
            # canonical name, consistent with the sibling definition below.
            logger.warning('Exception in network integration on node %i.'
                           % my_rank)
    return traj_set
def few_ensemble_trajs(net, times, elements):
    """Integrate `net` at each parameter set in `elements`, returning the
    trajectories that succeeded and contain no NaN values.

    Integrations that raise a SloppyCellException are logged and skipped.
    """
    import SloppyCell.ReactionNetworks.Dynamics as Dynamics
    kept = []
    for param_set in elements:
        try:
            candidate = Dynamics.integrate(net, times, params=param_set,
                                           fill_traj=False)
        except Utility.SloppyCellException:
            logger.warning('Exception in network integration on node %i.' % my_rank)
            continue
        # Keep only numerically clean trajectories.
        if not scipy.any(scipy.isnan(candidate.values)):
            kept.append(candidate)
    return kept
def integrate(net, times, p=None, x0=None, atol=None, rtol=None, varids=None):
    """A wrapper of SloppyCell.ReactionNetworks.Dynamics.integrate, where two
    previously nonintuitive behaviors are fixed here:
    1) x(t0) always falsely takes the value of x(t1), where t0 and t1 are the
       first and second time points in the return traj;
    2) when the argument times does not start from 0, the integration goes
       from 0 to tmax-tmin.

    :param net: network to integrate (compiled on demand)
    :param times: list or tuple of time points; a tuple requests intermediate
        output (fill_traj=True) from the underlying integrator
    :param p: parameter values; if given, assigned to net.p before integrating
    :param x0: initial condition; defaults to net.x0 (read after net.p is set,
        since the initial state may depend on the parameters)
    :param atol: absolute tolerance (scalar); defaults to module-level ATOL
    :param rtol: relative tolerance (scalar); defaults to module-level RTOL
    :param varids: if given, the returned trajectory is restricted to these
        variable ids
    :return: Trajectory (pandas-backed), indexed by time
    """
    # A tuple `times` signals that the caller wants all intermediate
    # integrator output, not just the requested time points.
    if isinstance(times, tuple):
        intermediate_output = True
    else:
        intermediate_output = False
    t0 = times[0]
    # Always integrate starting from t=0; if the caller's times do not start
    # at 0, prepend it and trim the extra points off afterwards (fix #2 in
    # the docstring).
    if t0 == 0:
        _times = times
    else:
        _times = [0] + list(times)
    if p is not None:
        net.p = p
    # NOTE(review): x0 is resolved here but never passed to the active
    # Dynamics.integrate call below — it was only consumed by the disabled
    # daskr path. A non-default x0 argument appears to be silently ignored;
    # confirm against callers before relying on it.
    if x0 is None:
        x0 = net.x0
    if atol is None:
        atol = ATOL
    if rtol is None:
        rtol = RTOL
    # Ensure the network has been compiled before integrating.
    if not hasattr(net, 'res_function'):
        net.compile()
    # Disabled direct-daskr path, kept for reference (see comment below).
    """
    out = daskr.daeint(res=net.res_function, t=_times,
                       y0=x0.copy(), yp0=[0]*net.xdim,
                       atol=[atol]*net.xdim, rtol=[rtol]*net.xdim,
                       intermediate_output=intermediate_output,
                       rpar=net.constants)
    traj = out[0]
    traj[0] = x0
    times = out[1]
    """
    # Use SloppyCell.ReactionNetworks.Dynamics.integrate for now as it wraps
    # around SloppyCell.daskr and is somehow more stable than daskr itself
    # but not much slower.
    # It automatically updates net.x hence no need to manually update x.
    out = Dynamics.integrate(net, _times, atol=[atol] * net.xdim,
                             rtol=[rtol] * net.xdim,
                             fill_traj=intermediate_output)
    if varids is not None:
        out = out.copy_subset(varids)
    traj = out.values
    times = out.timepoints
    # Reassign varids from the (possibly subsetted) trajectory so the column
    # labels always match the data.
    varids = out.key_column.keys()
    # If we prepended t=0 above, drop everything before the caller's first
    # requested time point.
    if t0 != 0:
        idx_t0 = list(times).index(t0)
        times = times[idx_t0:]
        traj = traj[idx_t0:]
    return Trajectory(traj, index=pd.Index(times, name='time'),
                      columns=varids)