class TrustConstr(SciPyInterface):
    """Class for optimization with SciPy's trust-constr method.

    This class wraps the trust-constr method from the SciPy optimization
    suite. It defines the solver options in _options and implements the
    abstract method run for running the optimization.
    """
    gtol = UnsignedFloat(default=1e-6)
    xtol = UnsignedFloat(default=1e-8)
    barrier_tol = UnsignedFloat(default=1e-8)
    initial_constr_penalty = UnsignedFloat(default=1.0)
    initial_tr_radius = UnsignedFloat(default=1.0)
    initial_barrier_parameter = UnsignedFloat(default=0.01)
    initial_barrier_tolerance = UnsignedFloat(default=0.01)
    factorization_method = None
    maxiter = UnsignedInteger(default=1000)
    verbose = UnsignedInteger(default=0)
    disp = Bool(default=False)

    _options = [
        'gtol', 'xtol', 'barrier_tol', 'finite_diff_rel_step',
        'initial_constr_penalty', 'initial_tr_radius',
        'initial_barrier_parameter', 'initial_barrier_tolerance',
        'factorization_method', 'maxiter', 'verbose', 'disp'
    ]

    jac = Switch(default='3-point', valid=['2-point', '3-point', 'cs'])

    def __str__(self):
        return 'trust-constr'
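# --- Example (sketch): how options like the above reach SciPy ---------------
# The run() implementation is not shown in this excerpt; this standalone
# snippet only illustrates how a dictionary assembled from `_options` would be
# consumed by scipy.optimize.minimize with method='trust-constr'. The
# Rosenbrock test function serves purely as a placeholder objective.
import numpy as np
from scipy.optimize import minimize, rosen

options = {
    'gtol': 1e-6, 'xtol': 1e-8, 'barrier_tol': 1e-8,
    'initial_constr_penalty': 1.0, 'initial_tr_radius': 1.0,
    'initial_barrier_parameter': 0.01, 'initial_barrier_tolerance': 0.01,
    'maxiter': 1000, 'verbose': 0, 'disp': False,
}
x0 = np.array([0.5, 0.5])
res = minimize(rosen, x0, method='trust-constr', jac='3-point', options=options)
print(res.x, res.fun)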
class NelderMead(SciPyInterface):
    """Wrapper for SciPy's Nelder-Mead optimization method."""
    maxiter = UnsignedInteger()
    maxfev = UnsignedInteger()
    initial_simplex = None
    xatol = UnsignedFloat(default=0.01)
    fatol = UnsignedFloat(default=0.01)
    adaptive = Bool(default=True)

    _options = [
        'maxiter', 'maxfev', 'initial_simplex', 'xatol', 'fatol', 'adaptive'
    ]
class WenoParameters(ParametersGroup):
    """Class for defining the discretization_weno_parameters.

    Defines several parameters as UnsignedInteger and UnsignedFloat and saves
    their names in the list _parameters.

    See also
    --------
    ParametersGroup
    """
    boundary_model = UnsignedInteger(default=0, ub=3)
    weno_eps = UnsignedFloat(default=1e-10)
    weno_order = UnsignedInteger(default=3, ub=3)

    _parameters = ['boundary_model', 'weno_eps', 'weno_order']
class SolverParametersGroup(ParametersGroup): """Class for defining the solver parameters for cadet. See also -------- ParametersGroup """ nthreads = UnsignedInteger(default=1) consistent_init_mode = UnsignedInteger(default=1, ub=7) consistent_init_mode_sens = UnsignedInteger(default=1, ub=7) _parameters = [ 'nthreads', 'consistent_init_mode', 'consistent_init_mode_sens' ]
class ModelSolverParametersGroup(ParametersGroup):
    """Class for defining the model_solver_parameters.

    Defines several parameters as UnsignedInteger and UnsignedFloat with
    default values and saves their names in the list _parameters.

    See also
    --------
    ParametersGroup
    """
    GS_TYPE = UnsignedInteger(default=1, ub=1)
    MAX_KRYLOV = UnsignedInteger(default=0)
    MAX_RESTARTS = UnsignedInteger(default=10)
    SCHUR_SAFETY = UnsignedFloat(default=1e-8)

    _parameters = ['GS_TYPE', 'MAX_KRYLOV', 'MAX_RESTARTS', 'SCHUR_SAFETY']
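# --- Example (sketch): collecting a ParametersGroup into a dict -------------
# The actual ParametersGroup implementation is not shown in this excerpt.
# This self-contained sketch only illustrates the pattern used by the groups
# above: each subclass lists its attribute names in `_parameters`, and the
# group can then be serialized by reading those attributes. All names below
# are hypothetical.
class ParametersGroupSketch:
    _parameters = []

    def to_dict(self):
        return {name: getattr(self, name) for name in self._parameters}


class WenoParametersSketch(ParametersGroupSketch):
    boundary_model = 0
    weno_eps = 1e-10
    weno_order = 3
    _parameters = ['boundary_model', 'weno_eps', 'weno_order']


print(WenoParametersSketch().to_dict())
# {'boundary_model': 0, 'weno_eps': 1e-10, 'weno_order': 3}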
class BiLangmuir(BindingBaseClass):
    """Parameters for the Multi Component Bi-Langmuir binding model.

    Attributes
    ----------
    adsorption_rate : list of unsigned floats.
        Adsorption rate constants. Length depends on n_comp and n_states.
    desorption_rate : list of unsigned floats.
        Desorption rate constants. Length depends on n_comp and n_states.
    saturation_capacity : list of unsigned floats.
        Maximum adsorption capacities. Length depends on n_comp and n_states.
    """
    adsorption_rate = DependentlySizedUnsignedList(dep=('n_comp', 'n_states'))
    desorption_rate = DependentlySizedUnsignedList(
        dep=('n_comp', 'n_states'), default=1)
    saturation_capacity = DependentlySizedUnsignedList(
        dep=('n_comp', 'n_states'))
    n_states = UnsignedInteger()

    def __init__(self, *args, n_states=2, **kwargs):
        super().__init__(*args, **kwargs)
        self.n_states = n_states

        self._parameter_names += [
            'adsorption_rate', 'desorption_rate', 'saturation_capacity',
            'n_states'
        ]

    @property
    def n_total_states(self):
        return self.n_comp * self.n_states
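# --- Example (sketch): parameter sizing in the Bi-Langmuir model -------------
# The DependentlySizedUnsignedList descriptors above depend on both n_comp and
# n_states, so each parameter holds n_comp * n_states entries (n_total_states).
# Plain-Python illustration with assumed numbers; no library classes involved.
n_comp, n_states = 3, 2
n_total_states = n_comp * n_states          # matches BiLangmuir.n_total_states
adsorption_rate = [0.02] * n_total_states   # one rate per component and state
desorption_rate = [1.0] * n_total_states
print(n_total_states, len(adsorption_rate))  # 6 6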
class LRMDiscretizationFV(DiscretizationParametersBase): ncol = UnsignedInteger(default=100) use_analytic_jacobian = Bool(default=True) reconstruction = Switch(default='WENO', valid=['WENO']) _parameters = DiscretizationParametersBase._parameters + [ 'ncol', 'use_analytic_jacobian', 'reconstruction', ] _dimensionality = ['ncol']
class SolverTimeIntegratorParametersGroup(ParametersGroup): """Class for defining the solver time integrator parameters for cadet. See also -------- ParametersGroup """ abstol = UnsignedFloat(default=1e-8) algtol = UnsignedFloat(default=1e-12) reltol = UnsignedFloat(default=1e-6) reltol_sens = UnsignedFloat(default=1e-12) init_step_size = UnsignedFloat(default=1e-6) max_steps = UnsignedInteger(default=1000000) max_step_size = UnsignedInteger(default=1000000) errortest_sens = Bool(default=False) max_newton_iter = UnsignedInteger(default=1000000) max_errtest_fail = UnsignedInteger(default=1000000) max_convtest_fail = UnsignedInteger(default=1000000) max_newton_iter_sens = UnsignedInteger(default=1000000) _parameters = [ 'abstol', 'algtol', 'reltol', 'reltol_sens', 'init_step_size', 'max_steps', 'max_step_size', 'errortest_sens', 'max_newton_iter', 'max_errtest_fail', 'max_convtest_fail', 'max_newton_iter_sens' ]
class SensitivityParametersGroup(ParametersGroup): """Class for defining the sensitivity parameters. The sensitivity parameters NSENS and SENS_METHOD are defined with default values. See also -------- ParametersGroup """ nsens = UnsignedInteger(default=0) sens_method = Switch(default='ad1', valid=['ad1'])
class LRMPDiscretizationFV(DiscretizationParametersBase): ncol = UnsignedInteger(default=100) par_geom = Switch( default='SPHERE', valid=['SPHERE', 'CYLINDER', 'SLAB'] ) use_analytic_jacobian = Bool(default=True) reconstruction = Switch(default='WENO', valid=['WENO']) gs_type = Bool(default=True) max_krylov = UnsignedInteger(default=0) max_restarts = UnsignedInteger(default=10) schur_safety = UnsignedFloat(default=1.0e-8) _parameters = DiscretizationParametersBase._parameters + [ 'ncol', 'par_geom', 'use_analytic_jacobian', 'reconstruction', 'gs_type', 'max_krylov', 'max_restarts', 'schur_safety' ] _dimensionality = ['ncol']
class GRMDiscretizationFV(DiscretizationParametersBase): ncol = UnsignedInteger(default=100) npar = UnsignedInteger(default=5) par_geom = Switch( default='SPHERE', valid=['SPHERE', 'CYLINDER', 'SLAB'] ) par_disc_type = Switch( default='EQUIDISTANT_PAR', valid=['EQUIDISTANT_PAR', 'EQUIVOLUME_PAR', 'USER_DEFINED_PAR'] ) par_disc_vector = DependentlySizedRangedList(lb=0, ub=1, dep='par_disc_vector_length') par_boundary_order = RangedInteger(lb=1, ub=2, default=2) use_analytic_jacobian = Bool(default=True) reconstruction = Switch(default='WENO', valid=['WENO']) gs_type = Bool(default=True) max_krylov = UnsignedInteger(default=0) max_restarts = UnsignedInteger(default=10) schur_safety = UnsignedFloat(default=1.0e-8) fix_zero_surface_diffusion = Bool(default=False) _parameters = DiscretizationParametersBase._parameters + [ 'ncol', 'npar', 'par_geom', 'par_disc_type', 'par_disc_vector', 'par_boundary_order', 'use_analytic_jacobian', 'reconstruction', 'gs_type', 'max_krylov', 'max_restarts', 'schur_safety', 'fix_zero_surface_diffusion', ] _dimensionality = ['ncol', 'npar'] @property def par_disc_vector_length(self): return self.npar + 1
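# --- Example (sketch): user-defined particle discretization vector -----------
# par_disc_vector_length is npar + 1, i.e. one entry per particle cell boundary
# on the normalized radius [0, 1]. A user-defined, equidistant vector could be
# built like this (illustrative only; only numpy is assumed).
import numpy as np

npar = 5
par_disc_vector = np.linspace(0, 1, npar + 1)
print(par_disc_vector)  # [0.  0.2 0.4 0.6 0.8 1. ]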
class COBYLA(SciPyInterface):
    """Class for optimization with SciPy's COBYLA method.

    This class wraps the COBYLA method from the SciPy optimization suite.
    It defines the solver options in _options and implements the abstract
    method run for running the optimization.
    """
    rhobeg = UnsignedFloat(default=1)
    maxiter = UnsignedInteger(default=1000)
    disp = Bool(default=False)
    catol = UnsignedFloat(default=0.0002)

    _options = ['rhobeg', 'maxiter', 'disp', 'catol']
class ZoneBaseClass(UnitBaseClass):
    n_columns = UnsignedInteger()
    flow_direction = Integer(default=1)

    def __init__(
            self, n_comp, name, n_columns=1, flow_direction=1,
            initial_state=None, *args, **kwargs):
        self.n_columns = n_columns
        self.flow_direction = flow_direction
        self.initial_state = initial_state

        self._inlet_unit = Cstr(n_comp, f'{name}_inlet')
        self._inlet_unit.V = 1e-6
        self._outlet_unit = Cstr(n_comp, f'{name}_outlet')
        self._outlet_unit.V = 1e-6

        super().__init__(n_comp, name, *args, **kwargs)

    @property
    def initial_state(self):
        return self._initial_state

    @initial_state.setter
    def initial_state(self, initial_state):
        if initial_state is None:
            self._initial_state = initial_state
            return

        if not isinstance(initial_state, list):
            initial_state = self.n_columns * [initial_state]

        if len(initial_state) != self.n_columns:
            raise CADETProcessError(f"Expected size {self.n_columns}")

        self._initial_state = initial_state

    @property
    def inlet_unit(self):
        return self._inlet_unit

    @property
    def outlet_unit(self):
        return self._outlet_unit
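# --- Example (sketch): broadcasting an initial state to all columns ----------
# Standalone illustration of the initial_state setter logic above: a scalar
# state is repeated for every column, while a list must match n_columns
# exactly. Pure Python, no library classes required.
def broadcast_initial_state(initial_state, n_columns):
    if initial_state is None:
        return None
    if not isinstance(initial_state, list):
        initial_state = n_columns * [initial_state]
    if len(initial_state) != n_columns:
        raise ValueError(f"Expected size {n_columns}")
    return initial_state

print(broadcast_initial_state(1.0, n_columns=4))  # [1.0, 1.0, 1.0, 1.0]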
class ConsistencySolverParametersGroup(ParametersGroup): """Class for defining the consistency solver parameters for cadet. See also -------- ParametersGroup """ solver_name = Switch( default='LEVMAR', valid=['LEVMAR', 'ATRN_RES', 'ARTN_ERR', 'COMPOSITE'] ) init_damping = UnsignedFloat(default=0.01) min_damping = UnsignedFloat(default=0.0001) max_iterations = UnsignedInteger(default=50) subsolvers = Switch( default='LEVMAR', valid=['LEVMAR', 'ATRN_RES', 'ARTN_ERR'] ) _parameters = [ 'solver_name', 'init_damping', 'min_damping', 'max_iterations', 'subsolvers' ]
class OptimizationResults(metaclass=StructMeta): """Class for storing optimization results including the solver configuration Attributes ---------- optimization_problem : OptimizationProblem Optimization problem evaluation_object : obj Evaluation object in optimized state. solver_name : str Name of the solver used to simulate the process solver_parameters : dict Dictionary with parameters used by the solver exit_flag : int Information about the solver termination. exit_message : str Additional information about the solver status time_elapsed : float Execution time of simulation. x : list Values of optimization variables at optimum. f : np.ndarray Value of objective function at x. c : np.ndarray Values of constraint function at x """ x0 = List() solver_name = String() solver_parameters = Dict() exit_flag = UnsignedInteger() exit_message = String() time_elapsed = UnsignedFloat() x = List() f = NdArray() c = NdArray() performance = Dict() def __init__( self, optimization_problem, evaluation_object, solver_name, solver_parameters, exit_flag, exit_message, time_elapsed, x, f, c, performance, frac=None, history=None ): self.optimization_problem = optimization_problem self.evaluation_object = evaluation_object self.solver_name = solver_name self.solver_parameters = solver_parameters self.exit_flag = exit_flag self.exit_message = exit_message self.time_elapsed = time_elapsed self.x = x self.f = f if c is not None: self.c = c self.performance = performance self.frac = frac self.history = history def to_dict(self): return { 'optimization_problem': self.optimization_problem.name, 'optimization_problem_parameters': self.optimization_problem.parameters, 'evaluation_object_parameters': self.evaluation_object.parameters, 'x0': self.optimization_problem.x0, 'solver_name': self.solver_name, 'solver_parameters': self.solver_parameters, 'exit_flag': self.exit_flag, 'exit_message': self.exit_message, 'time_elapsed': self.time_elapsed, 'x': self.x, 'f': self.f.tolist(), 'c': self.c.tolist(), 'performance': self.performance, 'git': { 'chromapy_branch': str(settings.repo.active_branch), 'chromapy_commit': settings.repo.head.object.hexsha } } def save(self, directory): path = os.path.join(settings.project_directory, directory, 'results.json') with open(path, 'w') as f: json.dump(self.to_dict(), f, indent=4) def plot_solution(self): pass
class ReactionBaseClass(metaclass=StructMeta):
    """Abstract base class for parameters of reaction models.

    Attributes
    ----------
    n_comp : UnsignedInteger
        Number of components.
    parameters : dict
        Dict with parameter values.
    name : String
        Name of the reaction model.
    """
    name = String()
    n_comp = UnsignedInteger()

    _parameter_names = []

    def __init__(self, component_system, name=None):
        self.component_system = component_system
        self.name = name

        self._parameters = {
            param: getattr(self, param) for param in self._parameter_names
        }

    @property
    def model(self):
        return self.__class__.__name__

    @property
    def component_system(self):
        return self._component_system

    @component_system.setter
    def component_system(self, component_system):
        if not isinstance(component_system, ComponentSystem):
            raise TypeError('Expected ComponentSystem')
        self._component_system = component_system

    @property
    def n_comp(self):
        return self.component_system.n_comp

    @property
    def parameters(self):
        """dict: Dictionary with parameter values."""
        return {param: getattr(self, param) for param in self._parameter_names}

    @parameters.setter
    def parameters(self, parameters):
        for param, value in parameters.items():
            if param not in self._parameter_names:
                raise CADETProcessError('Not a valid parameter')
            setattr(self, param, value)

    def __repr__(self):
        return '{}(n_comp={}, name=\'{}\')'.format(
            self.__class__.__name__, self.n_comp, self.name)

    def __str__(self):
        return self.name
class FlowSheet(metaclass=StructMeta): """Class to design process flow sheet. In this class, UnitOperation models are added and connected in a flow sheet. Attributes ---------- n_comp : UnsignedInteger Number of components of the units in the flow sheet. name : String Name of the FlowSheet. units : list UnitOperations in the FlowSheet. connections : dict Connections of UnitOperations. output_states : dict Split ratios of outgoing streams of UnitOperations. """ name = String() n_comp = UnsignedInteger() def __init__(self, component_system, name=None): self.component_system = component_system self.name = name self._units = [] self._feed_sources = [] self._eluent_sources = [] self._chromatogram_sinks = [] self._connections = Dict() self._output_states = Dict() self._flow_rates = Dict() self._parameters = Dict() self._section_dependent_parameters = Dict() self._polynomial_parameters = Dict() @property def component_system(self): return self._component_system @component_system.setter def component_system(self, component_system): if not isinstance(component_system, ComponentSystem): raise TypeError('Expected ComponentSystem') self._component_system = component_system @property def n_comp(self): return self.component_system.n_comp def _unit_name_decorator(func): def wrapper(self, unit, *args, **kwargs): """Enable calling functions with unit object or unit name. """ if isinstance(unit, str): try: unit = self.units_dict[unit] except KeyError: raise CADETProcessError('Not a valid unit') return func(self, unit, *args, **kwargs) return wrapper def update_parameters(self): for unit in self.units: self._parameters[unit.name] = unit.parameters self._section_dependent_parameters[unit.name] = \ unit.section_dependent_parameters self._polynomial_parameters[unit.name] = unit.polynomial_parameters self._parameters['output_states'] = { unit.name: self.output_states[unit] for unit in self.units } self._section_dependent_parameters['output_states'] = { unit.name: self.output_states[unit] for unit in self.units } def update_parameters_decorator(func): def wrapper(self, *args, **kwargs): """Update parameters dict to save time. """ results = func(self, *args, **kwargs) self.update_parameters() return results return wrapper @property def units(self): """list: list of all unit_operations in the flow sheet. """ return self._units @property def units_dict(self): """dict: Unit names and objects. """ return {unit.name: unit for unit in self.units} @property def unit_names(self): """list: Unit names """ return [unit.name for unit in self.units] @property def number_of_units(self): """int: Number of unit operations in the FlowSheet. """ return len(self._units) @_unit_name_decorator def get_unit_index(self, unit): """Return the unit index of the unit. Parameters ---------- unit : UnitBaseClass UnitBaseClass object of which the index is to be returned. Raises ------ CADETProcessError If unit does not exist in the current flow sheet. Returns ------- unit_index : int Returns the unit index of the unit_operation. """ if unit not in self.units: raise CADETProcessError('Unit not in flow sheet') return self.units.index(unit) @property def sources(self): """list: All UnitOperations implementing the SourceMixin interface. """ return [unit for unit in self._units if isinstance(unit, SourceMixin)] @property def sinks(self): """list: All UnitOperations implementing the SinkMixin interface. 
""" return [unit for unit in self._units if isinstance(unit, SinkMixin)] @property def units_with_binding(self): """list: UnitOperations with binding models. """ return [unit for unit in self._units if not isinstance(unit.binding_model, NoBinding)] @update_parameters_decorator def add_unit( self, unit, feed_source=False, eluent_source=False, chromatogram_sink=False ): """Add unit to the flow sheet. Parameters ---------- unit : UnitBaseClass UnitBaseClass object to be added to the flow sheet. feed_source : bool If True, add unit to feed sources. eluent_source : bool If True, add unit to eluent sources. chromatogram_sink : bool If True, add unit to chromatogram sinks. Raises ------ TypeError If unit is no instance of UnitBaseClass. CADETProcessError If unit already exists in flow sheet. If n_comp does not match with FlowSheet. See Also -------- remove_unit """ if not isinstance(unit, UnitBaseClass): raise TypeError('Expected UnitOperation') if unit in self._units: raise CADETProcessError('Unit already part of System') if unit.component_system is not self.component_system: raise CADETProcessError('Component systems do not match.') self._units.append(unit) self._connections[unit] = Dict({ 'origins': [], 'destinations': [], }) self._output_states[unit] = [] self._flow_rates[unit] = [] super().__setattr__(unit.name, unit) if feed_source: self.add_feed_source(unit) if eluent_source: self.add_eluent_source(unit) if chromatogram_sink: self.add_chromatogram_sink(unit) @update_parameters_decorator def remove_unit(self, unit): """Remove unit from flow sheet. Removes unit from the list. Tries to remove units which are twice located as desinations. For this the origins and destinations are deleted for the unit. Raises a CADETProcessError if an ValueError is excepted. If the unit is specified as feed_source, eluent_source or chromatogram_sink, the corresponding attributes are deleted. Parameters ---------- unit : UnitBaseClass UnitBaseClass object to be removed to the flow sheet. Raises ------ CADETProcessError If unit does not exist in the flow sheet. See Also -------- add_unit feed_source eluent_source chromatogram_sink """ if unit not in self.units: raise CADETProcessError('Unit not in flow sheet') if unit is self.feed_sources: self.remove_feed_source(unit) if unit is self.eluent_sources: self.remove_eluent_source(unit) if unit is self.chromatogram_sinks: self.remove_chromatogram_sink(unit) origins = self.connections[unit].origins.copy() for origin in origins: self.remove_connection(origin, unit) destinations = self.connections[unit].destinations.copy() for destination in destinations: self.remove_connection(unit, destination) self._units.remove(unit) self._connections.pop(unit) self._output_states.pop(unit) self.__dict__.pop(unit.name) @property def connections(self): """dict: In- and outgoing connections for each unit. See Also -------- add_connection remove_connection """ return self._connections @update_parameters_decorator def add_connection(self, origin, destination): """Add connection between units 'origin' and 'destination'. Parameters ---------- origin : UnitBaseClass UnitBaseClass from which the connection originates. destination : UnitBaseClass UnitBaseClass where the connection terminates. Raises ------ CADETProcessError If origin OR destination do not exist in the current flow sheet. If connection already exists in the current flow sheet. 
See Also -------- connections remove_connection output_state """ if origin not in self._units: raise CADETProcessError('Origin not in flow sheet') if destination not in self._units: raise CADETProcessError('Destination not in flow sheet') if destination in self.connections[origin].destinations: raise CADETProcessError('Connection already exists') self._connections[origin].destinations.append(destination) self._connections[destination].origins.append(origin) self.set_output_state(origin, 0) @update_parameters_decorator def remove_connection(self, origin, destination): """Remove connection between units 'origin' and 'destination'. Parameters ---------- origin : UnitBaseClass UnitBaseClass from which the connection originates. destination : UnitBaseClass UnitBaseClass where the connection terminates. Raises ------ CADETProcessError If origin OR destination do not exist in the current flow sheet. If connection does not exists in the current flow sheet. See Also -------- connections add_connection """ if origin not in self._units: raise CADETProcessError('Origin not in flow sheet') if destination not in self._units: raise CADETProcessError('Destination not in flow sheet') try: self._connections[origin].destinations.remove(destination) self._connections[destination].origins.remove(origin) except KeyError: raise CADETProcessError('Connection does not exist.') @property def output_states(self): return self._output_states @_unit_name_decorator @update_parameters_decorator def set_output_state(self, unit, state): """Set split ratio of outgoing streams for UnitOperation. Parameters ---------- unit : UnitBaseClass UnitOperation of flowsheet. state : int or list of floats new output state of the unit. Raises ------ CADETProcessError If unit not in flowSheet If state is integer and the state >= the state_length. If the length of the states is unequal the state_length. If the sum of the states is not equal to 1. """ if unit not in self._units: raise CADETProcessError('Unit not in flow sheet') state_length = len(self.connections[unit].destinations) if state_length == 0: output_state = [] if isinstance(state, (int, np.int64)): if state >= state_length: raise CADETProcessError('Index exceeds destinations') output_state = [0] * state_length output_state[state] = 1 else: if len(state) != state_length: raise CADETProcessError( 'Expected length {}.'.format(state_length)) elif sum(state) != 1: raise CADETProcessError('Sum of fractions must be 1') output_state = state self._output_states[unit] = output_state def get_flow_rates(self, state=None): """Calculate flow rate for all connections.unit operation flow rates. If an additional state is passed, it will b Parameters ---------- state : Dict, optional Output states Returns ------- flow_rates : Dict Volumetric flow rate of each unit operation. 
""" flow_rates = { unit.name: unit.flow_rate for unit in self.sources } output_states = self.output_states if state is not None: for param, value in state.items(): param = param.split('.') unit_name = param[1] param_name = param[-1] if param_name == 'flow_rate': flow_rates[unit_name] = value[0] elif unit_name == 'output_states': unit = self.units_dict[param_name] output_states[unit] = list(value.ravel()) def list_factory(): return [0,0,0,0] total_flow_rates = {unit.name: list_factory() for unit in self.units} destination_flow_rates = { unit.name: defaultdict(list_factory) for unit in self.units } for i in range(4): solution = self.solve_flow_rates(flow_rates, output_states, i) if solution is not None: for unit_index, unit in enumerate(self.units): total_flow_rates[unit.name][i] = \ float(solution['Q_total_{}'.format(unit_index)]) for destination in self.connections[unit].destinations: destination_index = self.get_unit_index(destination) destination_flow_rates[unit.name][destination.name][i] = \ float(solution['Q_{}_{}'.format(unit_index, destination_index)]) flow_rates = Dict() for unit in self.units: flow_rates[unit.name].total = np.array(total_flow_rates[unit.name]) for destination, flow_rate in destination_flow_rates[unit.name].items(): flow_rates[unit.name].destinations[destination] = np.array(flow_rate) return flow_rates def solve_flow_rates(self, source_flow_rates, output_states, coeff=0): """Solve flow rates of system using sympy. Because a simple 'push' algorithm cannot be used when closed loops are present in a FlowSheet (e.g. SMBs), sympy is used to set up and solve the system of equations. Parameters ---------- source_flow_rates: dict Flow rates of Source UnitOperations. output_states: dict Output states of all UnitOperations. coeff: int Polynomial coefficient of flow rates to be solved. Returns ------- solution : dict Solution of the flow rates in the system Note ---- Since dynamic flow rates can be described as cubic polynomials, the flow rates are solved individually for all coefficients. 
""" coeffs = np.array( [source_flow_rates[unit.name][coeff] for unit in self.sources] ) if not np.any(coeffs): return None # Setup lists for symbols unit_total_flow_symbols = sym.symbols( 'Q_total_0:{}'.format(self.number_of_units) ) unit_inflow_symbols = [] unit_outflow_symbols = [] unit_total_flow_eq = [] unit_outflow_eq = [] # Setup symbolic equations for unit_index, unit in enumerate(self.units): if isinstance(unit, SourceMixin): unit_total_flow_eq.append( sym.Add( unit_total_flow_symbols[unit_index], - float(source_flow_rates[unit.name][coeff]) ) ) else: unit_i_inflow_symbols = [] for origin in self.connections[unit].origins: origin_index = self.get_unit_index(origin) unit_i_inflow_symbols.append( sym.symbols('Q_{}_{}'.format(origin_index, unit_index)) ) unit_i_total_flow_eq = sym.Add( *unit_i_inflow_symbols, -unit_total_flow_symbols[unit_index] ) unit_inflow_symbols += unit_i_inflow_symbols unit_total_flow_eq.append(unit_i_total_flow_eq) if not isinstance(unit, Sink): output_state = output_states[unit] unit_i_outflow_symbols = [] for destination in self.connections[unit].destinations: destination_index = self.get_unit_index(destination) unit_i_outflow_symbols.append( sym.symbols('Q_{}_{}'.format(unit_index, destination_index)) ) unit_i_outflow_eq = [ sym.Add( unit_i_outflow_symbols[dest], -unit_total_flow_symbols[unit_index]*output_state[dest] ) for dest in range(len(self.connections[unit].destinations)) ] unit_outflow_symbols += unit_i_outflow_symbols unit_outflow_eq += unit_i_outflow_eq # Solve system of equations solution = sym.solve( unit_total_flow_eq + unit_outflow_eq, (*unit_total_flow_symbols, *unit_inflow_symbols, *unit_outflow_symbols) ) solution = {str(key): value for key, value in solution.items()} return solution @property def feed_sources(self): """list: List of sources considered for calculating recovery yield. """ return self._feed_sources @_unit_name_decorator def add_feed_source(self, feed_source): """Add source to list of units to be considered for recovery. Parameters ---------- feed_source : SourceMixin Unit to be added to list of feed sources Raises ------ CADETProcessError If unit is not in a source object If unit is already marked as feed source """ if feed_source not in self.sources: raise CADETProcessError('Expected Source') if feed_source in self._feed_sources: raise CADETProcessError( '{} is already eluent source'.format(feed_source) ) self._feed_sources.append(feed_source) @_unit_name_decorator def remove_feed_source(self, feed_source): """Remove source from list of units to be considered for recovery. Parameters ---------- feed_source : SourceMixin Unit to be removed from list of feed sources. """ if feed_source not in self._feed_sources: raise CADETProcessError('Unit \'{}\' is not a feed source.'.format( feed_source)) self._feed_sources.remove(feed_source) @property def eluent_sources(self): """list: List of sources to be considered for eluent consumption. """ return self._eluent_sources @_unit_name_decorator def add_eluent_source(self, eluent_source): """Add source to list of units to be considered for eluent consumption. Parameters ---------- eluent_source : SourceMixin Unit to be added to list of eluent sources. 
Raises ------ CADETProcessError If unit is not in a source object If unit is already marked as eluent source """ if eluent_source not in self.sources: raise CADETProcessError('Expected Source') if eluent_source in self._eluent_sources: raise CADETProcessError('{} is already eluent source'.format( eluent_source)) self._eluent_sources.append(eluent_source) @_unit_name_decorator def remove_eluent_source(self, eluent_source): """Remove source from list of units to be considered eluent consumption. Parameters ---------- eluent_source : SourceMixin Unit to be added to list of eluent sources. Raises ------ CADETProcessError If unit is not in eluent sources """ if eluent_source not in self._eluent_sources: raise CADETProcessError('Unit \'{}\' is not an eluent source.'.format( eluent_source)) self._eluent_sources.remove(eluent_source) @property def chromatogram_sinks(self): """list: List of sinks to be considered for fractionation. """ return self._chromatogram_sinks @_unit_name_decorator def add_chromatogram_sink(self, chromatogram_sink): """Add sink to list of units to be considered for fractionation. Parameters ---------- chromatogram_sink : SinkMixin Unit to be added to list of chromatogram sinks. Raises ------ CADETProcessError If unit is not a sink object. If unit is already marked as chromatogram sink. """ if chromatogram_sink not in self.sinks: raise CADETProcessError('Expected Sink') if chromatogram_sink in self._chromatogram_sinks: raise CADETProcessError( '{} is already chomatogram sink'.format(chromatogram_sink) ) self._chromatogram_sinks.append(chromatogram_sink) @_unit_name_decorator def remove_chromatogram_sink(self, chromatogram_sink): """Remove sink from list of units to be considered for fractionation. Parameters ---------- chromatogram_sink : SinkMixin Unit to be added to list of chromatogram sinks. Raises ------ CADETProcessError If unit is not a chromatogram sink. """ if chromatogram_sink not in self._chromatogram_sinks: raise CADETProcessError( 'Unit \'{}\' is not a chromatogram sink.'.format(chromatogram_sink) ) self._chromatogram_sinks.remove(chromatogram_sink) @property def parameters(self): return self._parameters @parameters.setter def parameters(self, parameters): try: output_states = parameters.pop('output_states') for unit, state in output_states.items(): unit = self.units_dict[unit] self.set_output_state(unit, state) except KeyError: pass for unit, params in parameters.items(): if unit not in self.units_dict: raise CADETProcessError('Not a valid unit') self.units_dict[unit].parameters = params self.update_parameters() @property def section_dependent_parameters(self): return self._section_dependent_parameters @property def polynomial_parameters(self): return self._polynomial_parameters @property def initial_state(self): initial_state = {unit.name: unit.initial_state for unit in self.units} return initial_state @initial_state.setter def initial_state(self, initial_state): for unit, st in initial_state.items(): if unit not in self.units_dict: raise CADETProcessError('Not a valid unit') self.units_dict[unit].initial_state = st def __getitem__(self, unit_name): """Make FlowSheet substriptable s.t. units can be used as keys. Parameters ---------- unit_name : str Name of the unit. Returns ------- unit : UnitBaseClass UnitOperation of flowsheet. Raises ------ KeyError If unit not in flowSheet """ try: return self.units_dict[unit_name] except KeyError: raise KeyError('Not a valid unit') def __contains__(self, item): """Check if an item is part of units. 
        Parameters
        ----------
        item : UnitBaseClass
            Item to be searched.

        Returns
        -------
        bool
            True if item is in units, otherwise False.
        """
        return (item in self._units) or (item in self.unit_names)
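# --- Example (sketch): flow-rate balancing with sympy ------------------------
# Standalone illustration of the equation system that solve_flow_rates()
# assembles: one total-flow balance per unit plus one split equation per
# outgoing connection. Here a source (unit 0) feeds a column (unit 1) whose
# outlet is split 70/30 between two sinks (units 2 and 3). Assumes only sympy;
# the unit layout and flow rate are made up for the example.
import sympy as sym

Q_total = sym.symbols('Q_total_0:4')
Q_01, Q_12, Q_13 = sym.symbols('Q_0_1 Q_1_2 Q_1_3')

equations = [
    Q_total[0] - 1.0,            # source delivers a total flow of 1.0
    Q_01 - Q_total[1],           # inflow balance of unit 1
    Q_12 - Q_total[2],           # inflow balance of sink 2
    Q_13 - Q_total[3],           # inflow balance of sink 3
    Q_01 - Q_total[0],           # unit 0 routes everything to unit 1
    Q_12 - 0.7 * Q_total[1],     # output state of unit 1: 70 % to sink 2
    Q_13 - 0.3 * Q_total[1],     # output state of unit 1: 30 % to sink 3
]
solution = sym.solve(equations, (*Q_total, Q_01, Q_12, Q_13))
print({str(key): float(value) for key, value in solution.items()})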
class PymooInterface(SolverBase): """Wrapper around pymoo. """ seed = UnsignedInteger(default=12345) x_tol = UnsignedFloat(default=1e-8) cv_tol = UnsignedFloat(default=1e-6) f_tol = UnsignedFloat(default=0.0025) pop_size = UnsignedInteger(default=100) nth_gen = UnsignedInteger(default=1) n_last = UnsignedInteger(default=30) n_max_gen = UnsignedInteger(default=100) n_max_evals = UnsignedInteger(default=100000) n_cores = UnsignedInteger(default=0) _options = [ 'x_tol', 'cv_tol', 'f_tol', 'nth_gen', 'n_last', 'n_max_gen', 'n_max_evals', ] def run(self, optimization_problem, use_checkpoint=True): """Solve the optimization problem using the functional pymoo implementation. Returns ------- results : OptimizationResults Optimization results including optimization_problem and solver configuration. See Also -------- evaluate_objectives options """ self.optimization_problem = optimization_problem ieqs = [ lambda x: optimization_problem.evaluate_linear_constraints(x)[0] ] self.problem = PymooProblem(optimization_problem, self.n_cores) if use_checkpoint and os.path.isfile(self.pymoo_checkpoint_path): random.seed(self.seed) algorithm, = np.load(self.pymoo_checkpoint_path, allow_pickle=True).flatten() else: algorithm = self.setup_algorithm() start = time.time() while algorithm.has_next(): algorithm.next() np.save(self.pymoo_checkpoint_path, algorithm) print(algorithm.result().X, algorithm.result().F) elapsed = time.time() - start res = algorithm.result() x = res.X eval_object = optimization_problem.set_variables(x, make_copy=True) if self.optimization_problem.evaluator is not None: frac = optimization_problem.evaluator.simulate_and_fractionate( eval_object, ) performance = frac.performance else: frac = None performance = optimization_problem.evaluate(x, force=True) results = OptimizationResults( optimization_problem=optimization_problem, evaluation_object=eval_object, solver_name=str(self), solver_parameters=self.options, exit_flag=0, exit_message='success', time_elapsed=elapsed, x=res.X.tolist(), f=res.F, c=res.CV, frac=frac, performance=performance.to_dict(), history=res.history, ) return results @property def pymoo_checkpoint_path(self): pymoo_checkpoint_path = os.path.join( settings.project_directory, self.optimization_problem.name + '/pymoo_checkpoint.npy') return pymoo_checkpoint_path @property def population_size(self): if self.pop_size is None: return min(200, max(25 * self.optimization_problem.n_variables, 50)) else: return self.pop_size @property def max_number_of_generations(self): if self.n_max_gen is None: return min(100, max(10 * self.optimization_problem.n_variables, 40)) else: return self.n_max_gen def setup_algorithm(self): algorithm = pymoo.factory.get_algorithm( str(self), ref_dirs=self.ref_dirs, pop_size=self.population_size, sampling=self.optimization_problem.create_initial_values( self.population_size, method='chebyshev', seed=self.seed), repair=RoundIndividuals(self.optimization_problem), ) algorithm.setup(self.problem, termination=self.termination, seed=self.seed, verbose=True) return algorithm @property def termination(self): termination = MultiObjectiveDefaultTermination( x_tol=self.x_tol, cv_tol=self.cv_tol, f_tol=self.f_tol, nth_gen=self.nth_gen, n_last=self.n_last, n_max_gen=self.n_max_gen, n_max_evals=self.n_max_evals) return termination @property def ref_dirs(self): ref_dirs = get_reference_directions( "energy", self.optimization_problem.n_objectives, self.population_size, seed=1) return ref_dirs
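# --- Example (sketch): default population/generation heuristics --------------
# Standalone illustration of the fallback heuristics used by population_size
# and max_number_of_generations above when pop_size / n_max_gen are unset.
# Pure Python; the sample variable counts are arbitrary.
def default_pop_size(n_variables):
    return min(200, max(25 * n_variables, 50))

def default_n_max_gen(n_variables):
    return min(100, max(10 * n_variables, 40))

print(default_pop_size(2), default_n_max_gen(2))    # 50 40
print(default_pop_size(10), default_n_max_gen(10))  # 200 100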
class SolutionSolid(BaseSolution): """Solid phase solution inside the particles. Particle_solid: NCOL * NRAD * sum_{j}^{NPARTYPE}{NBOUND,j * NPAR,j} """ n_bound = UnsignedInteger() _coordinates = [ 'axial_coordinates', 'radial_coordinates', 'particle_coordinates' ] def __init__(self, component_system, n_bound, time, solution, axial_coordinates=None, radial_coordinates=None, particle_coordinates=None): self.component_system = component_system self.n_bound = n_bound self.time = time self.axial_coordinates = axial_coordinates # Account for dimension reduction in case of only one cell (e.g. LRMP) if radial_coordinates is not None and len(radial_coordinates) == 1: radial_coordinates = None self.radial_coordinates = radial_coordinates # Account for dimension reduction in case of only one cell (e.g. CSTR) if particle_coordinates is not None and len(particle_coordinates) == 1: particle_coordinates = None self.particle_coordinates = particle_coordinates self.solution = solution @property def n_comp(self): return self.component_system.n_comp * self.n_bound @property def ncol(self): if self.axial_coordinates is None: return None else: return len(self.axial_coordinates) @property def nrad(self): if self.radial_coordinates is None: return return len(self.radial_coordinates) @property def npar(self): if self.particle_coordinates is None: return return len(self.particle_coordinates) def _plot_1D(self, t, ymax): x = self.axial_coordinates if not self.time[0] <= t <= self.time[-1]: raise ValueError("Time exceeds bounds.") t_i = np.where(t <= self.time)[0][0] y = self.solution[t_i, :] if ymax is None: ymax = 1.1 * np.max(y) fig, ax = plotting.setup_figure() ax.plot(x, y) plotting.add_text(ax, f'time = {t:.2f} s') layout = plotting.Layout() layout.x_label = '$z~/~m$' layout.y_label = '$c~/~mM$' layout.labels = self.component_system.labels layout.ylim = (0, ymax) plotting.set_layout(fig, ax, layout) return fig, ax def _plot_2D(self, t, comp, state, vmax): x = self.axial_coordinates y = self.particle_coordinates if not self.time[0] <= t <= self.time[-1]: raise ValueError("Time exceeds bounds.") t_i = np.where(t <= self.time)[0][0] c_i = comp * self.n_bound + state v = self.solution[t_i, :, :, c_i].transpose() if vmax is None: vmax = v.max() fig, ax = plotting.setup_figure() mesh = ax.pcolormesh(x, y, v, shading='gouraud', vmin=0, vmax=vmax) plotting.add_text(ax, f'time = {t:.2f} s') layout = plotting.Layout() layout.title = f'Solid phase concentration, comp={comp}, state={state}' layout.x_label = '$z~/~m$' layout.y_label = '$r~/~m$' layout.labels = self.component_system.labels[c_i] plotting.set_layout(fig, ax, layout) fig.colorbar(mesh) return fig, ax, mesh @plotting.save_fig def plot_at_time(self, t, comp=0, state=0, vmax=None): """Plot bulk solution over spce at given time. Parameters ---------- t : float time for plotting comp : int, optional component index state : int, optional bound state vmax : float, optional Maximum values for plotting. See also -------- CADETProcess.plotting """ if self.npar is None: fig, ax = self._plot_1D(t, vmax) else: fig, ax, mesh = self._plot_2D(t, comp, state, vmax) return ax
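# --- Example (sketch): indexing bound states in the solid-phase solution -----
# The solid-phase solution stores n_comp * n_bound entries per spatial cell,
# and _plot_2D addresses them with c_i = comp * n_bound + state. Illustrative
# standalone snippet with random data; only numpy is assumed.
import numpy as np

n_time, n_col, n_par, n_comp, n_bound = 5, 10, 4, 2, 2
solution = np.random.rand(n_time, n_col, n_par, n_comp * n_bound)

comp, state = 1, 0
c_i = comp * n_bound + state            # flat index of (component 1, state 0)
snapshot = solution[2, :, :, c_i].T     # particle coordinate vs. axial position
print(snapshot.shape)                   # (4, 10)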
class DEAP(SolverBase): """ Adapter for optimization with an Genetic Algorithm called DEAP. Defines the solver options, the statistics, the history, the logbook and the toolbox for recording the optimization progess. It implements the abstract run method for running the optimization with DEAP. Attributes ---------- optimizationProblem: optimizationProblem Given optimization problem to be solved. options : dict Solver options, default set to None, if nothing is given. See also -------- base tools Statistics """ cxpb = UnsignedFloat(default=1) mutpb = UnsignedFloat(default=1) sig_figures = UnsignedInteger(default=3) seed = UnsignedInteger(default=12345) _options = ['cxpb', 'mutpb', 'sig_figures', 'seed'] def run(self, optimization_problem, n_gen=None, population_size=None, use_multicore=True, use_checkpoint=True): self.optimization_problem = optimization_problem # Abbreviations lb = optimization_problem.lower_bounds ub = optimization_problem.upper_bounds n_vars = optimization_problem.n_variables # Settings if population_size is None: population_size = min( 200, max(25 * len(optimization_problem.variables), 50)) if n_gen is None: n_gen = min(100, max(10 * len(optimization_problem.variables), 40)) # NSGA3 Settings n_obj = 1 p = 4 ref_points = tools.uniform_reference_points(n_obj, p) # !!! emo functions breaks if n_obj == 1, this is a temporary fix if n_obj == 1: def sortNDHelperB(best, worst, obj, front): if obj < 0: return sortNDHelperB(best, worst, obj, front) tools.emo.sortNDHelperB = sortNDHelperB # Definition of classes creator.create("FitnessMin", base.Fitness, weights=(-1.0, ) * n_obj) creator.create("Individual", list, fitness=creator.FitnessMin) # Tools toolbox = base.Toolbox() # Map for parallel evaluation manager = multiprocessing.Manager() cache = manager.dict() pool = multiprocessing.Pool() if use_multicore: toolbox.register("map", pool.map) # Functions for creating individuals and population toolbox.register("individual", tools.initIterate, creator.Individual, optimization_problem.create_initial_values) def initIndividual(icls, content): return icls(content) toolbox.register("individual_guess", initIndividual, creator.Individual) def initPopulation(pcls, ind_init, population_size): population = optimization_problem.create_initial_values( population_size) return pcls(ind_init(c) for c in population) toolbox.register( "population", initPopulation, list, toolbox.individual_guess, ) # Functions for evolution toolbox.register("evaluate", self.evaluate, cache=cache) toolbox.register("mate", tools.cxSimulatedBinaryBounded, low=lb, up=ub, eta=30.0) toolbox.register("mutate", tools.mutPolynomialBounded, low=lb, up=ub, eta=20.0, indpb=1.0 / n_vars) toolbox.register("select", tools.selNSGA3, nd="standard", ref_points=ref_points) # Round individuals to prevent reevaluation of similar individuals def round_individuals(): def decorator(func): def wrapper(*args, **kargs): offspring = func(*args, **kargs) for child in offspring: for index, el in enumerate(child): child[index] = round(el, self.sig_figures) return offspring return wrapper return decorator toolbox.decorate("mate", round_individuals()) toolbox.decorate("mutate", round_individuals()) statistics = tools.Statistics(key=lambda ind: ind.fitness.values) statistics.register("min", np.min) statistics.register("max", np.max) statistics.register("avg", np.mean) statistics.register("std", np.std) # Load checkpoint if present checkpoint_path = os.path.join( settings.project_directory, optimization_problem.name + '/checkpoint.pkl') if 
use_checkpoint and os.path.isfile(checkpoint_path): # A file name has been given, then load the data from the file with open(checkpoint_path, "rb") as cp_file: cp = pickle.load(cp_file) self.population = cp["population"] start_gen = cp["generation"] self.halloffame = cp["halloffame"] self.logbook = cp["logbook"] random.setstate(cp["rndstate"]) else: # Start a new evolution start_gen = 0 self.halloffame = tools.HallOfFame(maxsize=1) self.logbook = tools.Logbook() self.logbook.header = "gen", "evals", "std", "min", "avg", "max" # Initialize random population random.seed(self.seed) self.population = toolbox.population(population_size) # Evaluate the individuals with an invalid fitness invalid_ind = [ ind for ind in self.population if not ind.fitness.valid ] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Compile statistics about the population record = statistics.compile(self.population) self.logbook.record(gen=0, evals=len(invalid_ind), **record) # Begin the generational process start = time.time() for gen in range(start_gen, n_gen): self.offspring = algorithms.varAnd(self.population, toolbox, self.cxpb, self.mutpb) # Evaluate the individuals with an invalid fitness invalid_ind = [ ind for ind in self.offspring if not ind.fitness.valid ] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Select the next generation population from parents and offspring self.population = toolbox.select(self.population + self.offspring, population_size) # Compile statistics about the new population record = statistics.compile(self.population) self.logbook.record(gen=gen, evals=len(invalid_ind), **record) self.halloffame.update(self.population) # Create Checkpoint file cp = dict(population=self.population, generation=gen, halloffame=self.halloffame, logbook=self.logbook, rndstate=random.getstate()) with open(checkpoint_path, "wb") as cp_file: pickle.dump(cp, cp_file) best = self.halloffame.items[0] self.logger.info('Generation {}: x: {}, f: {}'.format( str(gen), str(best), str(best.fitness.values[0]))) elapsed = time.time() - start x = self.halloffame.items[0] eval_object = optimization_problem.set_variables(x, make_copy=True) if self.optimization_problem.evaluator is not None: frac = optimization_problem.evaluator.evaluate(eval_object, return_frac=True) performance = frac.performance else: frac = None performance = optimization_problem.evaluate(x, force=True) f = optimization_problem.objective_fun(performance) results = OptimizationResults( optimization_problem=optimization_problem, evaluation_object=eval_object, solver_name=str(self), solver_parameters=self.options, exit_flag=1, exit_message='DEAP terminated successfully', time_elapsed=elapsed, x=list(x), f=f, c=None, frac=frac, performance=performance.to_dict()) return results def evaluate(self, ind, cache=None): results = self.optimization_problem.evaluate_objectives(ind, make_copy=True, cache=cache) return results
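# --- Example (sketch): rounding offspring to avoid duplicate evaluations -----
# Standalone version of the round_individuals() decorator used in DEAP.run()
# above: any variation operator that returns offspring gets each gene rounded
# to a fixed number of decimal places (called sig_figures in the class above),
# so near-identical individuals hit the evaluation cache. Pure Python, no DEAP
# required; fake_mutate is a made-up operator for demonstration.
def round_individuals(sig_figures=3):
    def decorator(func):
        def wrapper(*args, **kwargs):
            offspring = func(*args, **kwargs)
            for child in offspring:
                for index, value in enumerate(child):
                    child[index] = round(value, sig_figures)
            return offspring
        return wrapper
    return decorator

@round_individuals(sig_figures=3)
def fake_mutate(population):
    return [[x + 1e-5 for x in individual] for individual in population]

print(fake_mutate([[0.123456, 1.0]]))  # [[0.123, 1.0]]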
class Process(EventHandler): """Class for defining the dynamic changes of a flow sheet. Attributes ---------- flow_sheet : FlowSheet Superstructure of the chromatographic process. name : str Name of the process object to be simulated. system_state : NoneType State of the process object, default set to None. system_state_derivate : NoneType Derivative of the state, default set to None. See also -------- EventHandler FlowSheet ProcessMeta Simulator """ _initial_states = ['system_state', 'system_state_derivative'] _n_cycles = UnsignedInteger(default=1) def __init__(self, flow_sheet, name, *args, **kwargs): self.flow_sheet = flow_sheet self.name = name self.system_state = None self.system_state_derivative = None super().__init__(*args, **kwargs) @property def n_comp(self): return self.flow_sheet.n_comp @property def flow_sheet(self): """FlowSheet: flow sheet of the process model. Raises ------- TypeError: If flow_sheet is not an instance of FlowSheet. Returns ------- flow_sheet : FlowSheet Superstructure of the chromatographic process. """ return self._flow_sheet @flow_sheet.setter def flow_sheet(self, flow_sheet): if not isinstance(flow_sheet, FlowSheet): raise TypeError('Expected FlowSheet') self._flow_sheet = flow_sheet @property def m_feed(self): """ndarray: Mass of the feed components entering the system in one cycle. !!! Account for dynamic flow rates and concentrations! """ flow_rate_timelines = self.flow_rate_timelines feed_all = np.zeros((self.n_comp, )) for feed in self.flow_sheet.feed_sources: feed_flow_rate_time_line = flow_rate_timelines[feed.name].total feed_signal_param = 'flow_sheet.{}.c'.format(feed.name) if feed_signal_param in self.parameter_timelines: feed_signal_time_line = self.parameter_timelines[ feed_signal_param] else: feed_signal_time_line = TimeLine() feed_section = Section(0, self.cycle_time, feed.c, n_entries=self.n_comp, degree=3) feed_signal_time_line.add_section(feed_section) m_i = [ integrate.quad( lambda t: \ feed_flow_rate_time_line.value(t) \ * feed_signal_time_line.value(t)[comp], 0, self.cycle_time, points=self.event_times )[0] for comp in range(self.n_comp) ] feed_all += np.array(m_i) return feed_all @property def V_eluent(self): """float: Volume of the eluent entering the system in one cycle. """ flow_rate_timelines = self.flow_rate_timelines V_all = 0 for eluent in self.flow_sheet.eluent_sources: eluent_time_line = flow_rate_timelines[eluent.name]['total'] V_eluent = eluent_time_line.integral() V_all += V_eluent return float(V_all) @property def V_solid(self): """float: Volume of all solid phase material used in flow sheet. """ return sum( [unit.volume_solid for unit in self.flow_sheet.units_with_binding]) @cached_property_if_locked def flow_rate_timelines(self): """dict: TimeLine of flow_rate for all unit_operations. 
""" flow_rate_timelines = { unit.name: { 'total': TimeLine(), 'destinations': defaultdict(TimeLine) } for unit in self.flow_sheet.units } # Create dummy section state for Processes without events if len(self.section_states) == 0: it = [(None, {})] else: it = self.section_states.items() for i, (time, state) in enumerate(it): start = self.section_times[i] end = self.section_times[i + 1] flow_rates = self.flow_sheet.get_flow_rates(state) for unit, flow_rate in flow_rates.items(): section = Section(start, end, flow_rate.total, n_entries=1, degree=3) flow_rate_timelines[unit]['total'].add_section(section) for dest, flow_rate_dest in flow_rate.destinations.items(): section = Section(start, end, flow_rate_dest, n_entries=1, degree=3) flow_rate_timelines[unit]['destinations'][ dest].add_section(section) return Dict(flow_rate_timelines) @cached_property_if_locked def flow_rate_section_states(self): """dict: Flow rates for all units for every section time. """ section_states = { time: { unit.name: { 'total': [], 'destinations': defaultdict(dict) } for unit in self.flow_sheet.units } for time in self.section_times[0:-1] } for sec_time in self.section_times[0:-1]: for unit, unit_flow_rates in self.flow_rate_timelines.items(): section_states[sec_time][unit]['total'] = \ unit_flow_rates['total'].coefficients(sec_time)[0] for dest, tl in unit_flow_rates.destinations.items(): section_states[sec_time][unit]['destinations'][dest] = \ tl.coefficients(sec_time)[0] return Dict(section_states) @property def time(self): """np.array: Returns time vector for one cycle Todo ---- Remove from Process; Check also EventHandler.plot_events() See Also -------- cycle_time _time_complete """ cycle_time = self.cycle_time return np.linspace(0, cycle_time, math.ceil(cycle_time)) @property def _time_complete(self): """np.array: time vector for mulitple cycles of a process. 
See Also -------- time _n_cycles """ complete_time = self._n_cycles * self.cycle_time indices = self._n_cycles * math.ceil(self.cycle_time) - ( self._n_cycles - 1) return np.round(np.linspace(0, complete_time, indices), 1) @property def system_state(self): return self._system_state @system_state.setter def system_state(self, system_state): self._system_state = system_state @property def system_state_derivative(self): return self._system_state_derivative @system_state_derivative.setter def system_state_derivative(self, system_state_derivative): self._system_state_derivative = system_state_derivative @property def parameters(self): parameters = super().parameters parameters['flow_sheet'] = self.flow_sheet.parameters return Dict(parameters) @parameters.setter def parameters(self, parameters): try: self.flow_sheet.parameters = parameters.pop('flow_sheet') except KeyError: pass super(Process, self.__class__).parameters.fset(self, parameters) @property def section_dependent_parameters(self): parameters = Dict() parameters.flow_sheet = self.flow_sheet.section_dependent_parameters return parameters @property def polynomial_parameters(self): parameters = Dict() parameters.flow_sheet = self.flow_sheet.polynomial_parameters return parameters @property def initial_state(self): initial_state = { state: getattr(self, state) for state in self._initial_states } initial_state['flow_sheet'] = self.flow_sheet.initial_state return initial_state @initial_state.setter def initial_state(self, initial_state): try: self.flow_sheet.initial_state = initial_state.pop('flow_sheet') except KeyError: pass for state_name, state_value in initial_state.items(): if state_name not in self._initial_state: raise CADETProcessError('Not an valid state') setattr(self, state_name, state_value) @property def config(self): return Dict({ 'parameters': self.parameters, 'initial_state': self.initial_state }) @config.setter def config(self, config): self.parameters = config['parameters'] self.initial_state = config['initial_state'] @property def process_meta(self): """ProcessMeta: Meta information of the process. See Also -------- ProcessResults Performance """ return ProcessMeta( cycle_time=self.cycle_time, m_feed=self.m_feed, V_solid=self.V_solid, V_eluent=self.V_eluent, ) def add_inlet_profile(self, unit, time, c, component_index=None, s=1e-6): if not isinstance(unit, Source): raise TypeError('Expected Source') if max(time) > self.cycle_time: raise ValueError('Inlet profile exceeds cycle time') if component_index == -1: # Assume same profile for all components if c.ndim > 1: raise ValueError('Expected single concentration profile') c = np.column_stack([c] * 2) elif component_index is None and c.shape[1] != self.n_comp: # Assume c is given for all components raise CADETProcessError('Number of components does not match') for comp in range(self.n_comp): tck = interpolate.splrep(time, c[:, comp], s=s) ppoly = interpolate.PPoly.from_spline(tck) for i, (t, sec) in enumerate(zip(ppoly.x, ppoly.c.T)): if i < 3: continue elif i > len(ppoly.x) - 5: continue evt = self.add_event(f'{unit}_inlet_{comp}_{i-3}', f'flow_sheet.{unit}.c', np.flip(sec), t, comp) def __str__(self): return self.name
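# --- Example (sketch): turning a sampled inlet profile into polynomials ------
# Standalone illustration of the spline machinery used by add_inlet_profile():
# a smoothing spline is fitted to a sampled concentration profile and converted
# to a piecewise cubic polynomial; each breakpoint with its (flipped, i.e.
# constant-term-first) coefficients would then define one Event per section.
# Assumes only numpy/scipy; the signal itself is made up.
import numpy as np
from scipy import interpolate

time = np.linspace(0, 100, 21)
c = np.exp(-((time - 40) / 15) ** 2)       # some sampled inlet signal

tck = interpolate.splrep(time, c, s=1e-6)
ppoly = interpolate.PPoly.from_spline(tck)

for t, coeffs in zip(ppoly.x[3:-4], ppoly.c.T[3:-4]):
    print(t, np.flip(coeffs))              # section start time, coefficients
    break                                  # show only the first section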
class SolverBase(metaclass=StructMeta): """BaseClass for Solver APIs Holds the configuration of the individual solvers and gives an interface for calling the run method. The class has to convert the process configuration into the APIs configuration format and convert the results back to the CADETProcess format. Attributes ---------- n_cycles : int Number of cycles to be simulated n_cycles_min : int If simulate_to_stationarity: Minimum number of cycles to be simulated. n_cycles_max : int If simulate_to_stationarity: Maximum number of cycles to be simulated. simulate_to_stationarity : bool Simulate until stationarity is reached See also -------- Process StationarityEvaluator """ n_cycles = UnsignedInteger(default=1) evaluate_stationarity = Bool(default=False) n_cycles_min = UnsignedInteger(default=3) n_cycles_max = UnsignedInteger(default=100) def __init__(self, stationarity_evaluator=None): self.logger = get_logger('Simulation') if stationarity_evaluator is None: self._stationarity_evaluator = StationarityEvaluator() else: self.stationarity_evaluator = stationarity_evaluator self.evaluate_stationarity = True def simulate(self, process, previous_results=None, **kwargs): """Simulate process. Depending on the state of evaluate_stationarity, the process is simulated until termination criterion is reached. Parameters ---------- process : Process Process to be simulated previous_results : SimulationResults Results of previous simulation run for initial conditions. Returns ------- results : SimulationResults Results the final cycle of the simulation. Raises ------ TypeError If process is not an instance of Process. See also -------- simulate_n_cycles simulate_to_stationarity run """ if not isinstance(process, Process): raise TypeError('Expected Process') process.lock = True if not self.evaluate_stationarity: results = self.simulate_n_cycles(process, self.n_cycles, previous_results, **kwargs) else: results = self.simulate_to_stationarity(process, previous_results, **kwargs) process.lock = False return results @log_time('Simulation') @log_results('Simulation') @log_exceptions('Simulation') def simulate_n_cycles(self, process, n_cyc, previous_results=None, **kwargs): """Simulates process for given number of cycles. Parameters ---------- process : Process Process to be simulated n_cyc : float Number of cycles previous_results : SimulationResults Results of previous simulation run. Returns ------- results : SimulationResults Results the final cycle of the simulation. Raises ------ TypeError If process is not an instance of Process. See also -------- simulate_n_cycles simulate_to_stationarity StationarityEvaluator run """ if not isinstance(process, Process): raise TypeError('Expected Process') if previous_results is not None: self.set_state_from_results(process, previous_results) process._n_cycles = n_cyc return self.run(process, **kwargs) @log_time('Simulation') @log_results('Simulation') @log_exceptions('Simulation') def simulate_to_stationarity(self, process, previous_results=None, **kwargs): """Simulate process until stationarity is reached. Parameters ---------- process : Process Process to be simulated previous_results : SimulationResults Results of previous simulation run. Returns ------- results : SimulationResults Results the final cycle of the simulation. Raises ------ TypeError If process is not an instance of Process. 
See also -------- simulate run StationarityEvaluator """ if not isinstance(process, Process): raise TypeError('Expected Process') if previous_results is not None: self.set_state_from_results(process, previous_results) # Simulate minimum number of cycles n_cyc = self.n_cycles_min process._n_cycles = n_cyc results = self.run(process, **kwargs) process._n_cycles = 1 # Simulate until stataionarity is reached. while True: n_cyc += 1 self.set_state_from_results(process, results) results_cycle = self.run(process, **kwargs) if n_cyc >= self.n_cycles_max: self.logger.warning("Exceeded maximum number of cycles") break stationarity = False for chrom_old, chrom_new in zip(results.chromatograms, results_cycle.chromatograms): stationarity = self.stationarity_evaluator.assert_stationarity( chrom_old, chrom_new) results.update(results_cycle) if stationarity: break return results def set_state_from_results(self, process, results): process.system_state = results.system_state['state'] process.system_state_derivative = results.system_state[ 'state_derivative'] return process @abstractmethod def run(process, **kwargs): """Abstract Method for running a simulation. Parameters ---------- process : Process Process to be simulated. Returns ------- results : SimulationResults Simulation results including process and solver configuration. Raises ------ TypeError If process is not an instance of Process CADETProcessError If simulation doesn't terminate successfully """ return @property def stationarity_evaluator(self): """Returns the stationarity evaluator. Returns ---------- stationarity_evaluator : StationarityEvaluator Evaluator for cyclic stationarity. """ return self._stationarity_evaluator @stationarity_evaluator.setter def stationarity_evaluator(self, stationarity_evaluator): if not isinstance(stationarity_evaluator, StationarityEvaluator): raise CADETProcessError('Expected StationarityEvaluator') self._stationarity_evaluator = stationarity_evaluator
class SimulationResults(metaclass=StructMeta):
    """Class for storing simulation results including the solver configuration.

    Attributes
    ----------
    solver_name : str
        Name of the solver used to simulate the process.
    solver_parameters : dict
        Dictionary with parameters used by the solver.
    exit_flag : int
        Information about the solver termination.
    exit_message : str
        Additional information about the solver status.
    time_elapsed : float
        Execution time of the simulation.
    process_name : str
        Name of the simulated process.
    process_config : dict
        Configuration of the simulated process.
    process_meta : dict
        Meta information of the process.
    solution : dict
        Time signals for all cycles of all unit operations.
    system_state : dict
        Final state and state_derivative of the system.
    chromatograms : list of Chromatogram
        Solution of the final cycle of the chromatogram_sinks.
    n_cycles : int
        Number of cycles that were simulated.

    Notes
    -----
    Ideally, the final state for each unit operation should be saved.
    However, CADET does not currently provide this functionality.
    """
    solver_name = String()
    solver_parameters = Dict()
    exit_flag = UnsignedInteger()
    exit_message = String()
    time_elapsed = UnsignedFloat()
    process_name = String()
    process_config = Dict()
    solution_cycles = Dict()
    system_state = Dict()
    chromatograms = List()

    def __init__(
            self, solver_name, solver_parameters, exit_flag, exit_message,
            time_elapsed, process_name, process_config, process_meta,
            solution_cycles, system_state, chromatograms):
        self.solver_name = solver_name
        self.solver_parameters = solver_parameters

        self.exit_flag = exit_flag
        self.exit_message = exit_message
        self.time_elapsed = time_elapsed

        self.process_name = process_name
        self.process_config = process_config
        self.process_meta = process_meta

        self.solution_cycles = solution_cycles
        self.system_state = system_state
        self.chromatograms = chromatograms

    def update(self, new_results):
        if self.process_name != new_results.process_name:
            raise CADETProcessError('Process does not match')

        self.exit_flag = new_results.exit_flag
        self.exit_message = new_results.exit_message
        self.time_elapsed += new_results.time_elapsed

        self.system_state = new_results.system_state

        self.chromatograms = new_results.chromatograms
        for unit, solutions in self.solution_cycles.items():
            for sol in solutions:
                self.solution_cycles[unit][sol].append(
                    new_results.solution[unit][sol])

    @property
    def solution(self):
        """Construct the complete solution from the individual cycles."""
        cycle_time = self.process_config['parameters']['cycle_time']

        time_complete = self.time_cycle
        for i in range(1, self.n_cycles):
            time_complete = np.hstack(
                (time_complete, self.time_cycle[1:] + i * cycle_time))

        solution = addict.Dict()
        for unit, solutions in self.solution_cycles.items():
            for sol, cycles in solutions.items():
                solution[unit][sol] = copy.deepcopy(cycles[0])
                solution_complete = cycles[0].solution
                for i in range(1, self.n_cycles):
                    solution_complete = np.vstack(
                        (solution_complete, cycles[i].solution[1:]))
                solution[unit][sol].time = time_complete
                solution[unit][sol].solution = solution_complete

        return solution

    @property
    def n_cycles(self):
        return len(
            self.solution_cycles[self._first_unit][self._first_solution])

    @property
    def _first_unit(self):
        return next(iter(self.solution_cycles))

    @property
    def _first_solution(self):
        return next(iter(self.solution_cycles[self._first_unit]))

    @property
    def time_cycle(self):
        """np.array: Solution times vector."""
        return \
            self.solution_cycles[self._first_unit][self._first_solution][0].time

    def save(self, case_dir, unit=None, start=0, end=None):
        path = os.path.join(settings.project_directory, case_dir)

        if unit is None:
            units = self.solution.keys()
        else:
            units = self.solution[unit]

        for unit in units:
            self.solution[unit][-1].plot(
                save_path=path + '/' + unit + '_last.png',
                start=start, end=end)

        for unit in units:
            self.solution_complete[unit].plot(
                save_path=path + '/' + unit + '_complete.png',
                start=start, end=end)

        for unit in units:
            self.solution[unit][-1].plot(
                save_path=path + '/' + unit + '_overlay.png',
                overlay=[cyc.signal for cyc in self.solution[unit][0:-1]],
                start=start, end=end)
class FractionPool(metaclass=StructMeta):
    """Collects multiple fractions with a common number of components.

    See also
    --------
    Fraction
    """
    n_comp = UnsignedInteger()

    def __init__(self, n_comp):
        self._fractions = []
        self.n_comp = n_comp

    def add_fraction(self, fraction):
        if not isinstance(fraction, Fraction):
            raise CADETProcessError('Expected Fraction')

        if fraction.n_comp != self.n_comp:
            raise CADETProcessError('Number of components does not match.')

        self._fractions.append(fraction)

    @property
    def fractions(self):
        if len(self._fractions) == 0:
            return [Fraction(np.zeros((self.n_comp,)), 0)]
        return self._fractions

    @property
    def volume(self):
        """Returns the sum of all fraction volumes in the fraction pool.

        Returns
        -------
        volume : float
            Cumulative volume of all fractions in the pool.
        """
        return sum(frac.volume for frac in self.fractions)

    @property
    def mass(self):
        """Returns the component-wise sum of the fraction masses of the pool.

        Returns
        -------
        mass : ndarray
            Cumulative mass of each component over all fractions in the pool.
        """
        return sum(frac.mass for frac in self.fractions)

    @property
    def pool_mass(self):
        """Returns the total mass of all components of all fractions of the pool.

        Returns
        -------
        pool_mass : float
            Cumulative total mass of all fractions in the pool.
        """
        return sum(frac.fraction_mass for frac in self.fractions)

    @property
    def purity(self):
        """Returns the average purity of the fraction pool.

        Invalid values are replaced by zero.

        Returns
        -------
        purity : ndarray
            Average purity of the fraction pool.

        See also
        --------
        mass
        pool_mass
        concentration
        """
        with np.errstate(divide='ignore', invalid='ignore'):
            purity = self.mass / self.pool_mass

        return np.nan_to_num(purity)

    @property
    def concentration(self):
        """Returns the average concentration of the fraction pool.

        Invalid values are replaced by zero.

        Returns
        -------
        concentration : ndarray
            Average concentration of the fraction pool.

        See also
        --------
        mass
        volume
        """
        with np.errstate(divide='ignore', invalid='ignore'):
            concentration = self.mass / self.volume

        return np.nan_to_num(concentration)

    def __repr__(self):
        return "%s(n_comp=%r)" % (self.__class__.__name__, self.n_comp)
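# Usage sketch for FractionPool, assuming the Fraction constructor signature
# Fraction(mass, volume) implied by the default fraction above, and that
# Fraction.mass is the per-component mass array while Fraction.fraction_mass
# is its total (all numbers are illustrative):
#
#     pool = FractionPool(n_comp=2)
#     pool.add_fraction(Fraction(np.array([1.0, 0.2]), 0.5))
#     pool.add_fraction(Fraction(np.array([0.8, 0.1]), 0.5))
#     pool.volume    # 1.0
#     pool.mass      # array([1.8, 0.3])
#     pool.purity    # component masses divided by the total pool mass (2.1)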