def __new__(cls, name, bases, dct):
    """Metaclass constructor.

    Flattens the combined 9ML model carried in ``dct['combined_model']``
    and derives the class attributes PyNN expects (default parameters,
    default initial values, receptor types, units, recordables) before
    creating the class object.

    Expects ``dct`` to contain ``'combined_model'`` (a
    ``nineml.abstraction.ComponentClass``), ``'weight_variables'``
    (a mapping keyed by receptor name) and ``'builder'`` (a callable
    that generates the backend implementation).
    """
    import nineml.abstraction as al
    from nineml.abstraction.dynamics.utils import flattener, modifiers

    # Extract parameters back out from the class dict.
    combined_model = dct['combined_model']
    weight_vars = dct['weight_variables']

    # Flatten the model (no-op if it is already flat).
    assert isinstance(combined_model, al.ComponentClass)
    if combined_model.is_flat():
        flat_component = combined_model
    else:
        flat_component = flattener.flatten(combined_model, name)

    # Substitute aliases/equations back into the flattened component.
    flat_component.backsub_all()

    # Close any open reduce ports so the component is self-contained.
    modifiers.DynamicPortModifier.close_all_reduce_ports(
        componentclass=flat_component)

    dct["combined_model"] = flat_component
    dct["default_parameters"] = {
        param.name: 1.0 for param in flat_component.parameters}
    dct["default_initial_values"] = {
        statevar.name: 0.0 for statevar in flat_component.state_variables}
    dct["receptor_types"] = list(weight_vars.keys())
    # BUG FIX: the original compared a list against a tuple, which is
    # always False in Python regardless of contents. Compare
    # list-to-list so the standard excitatory/inhibitory pair is
    # actually detected.
    dct["standard_receptor_type"] = (
        dct["receptor_types"] == ['excitatory', 'inhibitory'])
    # how to determine this? neuron component has a receive analog port
    # with dimension current, that is not connected to a synapse port?
    dct["injectable"] = False
    # how to determine this? synapse component has a receive analog port
    # with dimension voltage?
    dct["conductance_based"] = True
    dct["model_name"] = name
    dct["units"] = {
        statevar.name: _default_units[statevar.dimension.name]
        for statevar in flat_component.state_variables}

    # Everything that can be recorded: analog ports, the built-in
    # 'spikes'/'regime' signals, aliases (bindings) and state variables.
    dct["recordable"] = (
        [port.name for port in flat_component.analog_ports] +
        ['spikes', 'regime'] +
        [alias.lhs for alias in flat_component.aliases] +
        [statevar.name for statevar in flat_component.state_variables])
    # Lazy %-style args: the (large) dict is only formatted if DEBUG
    # logging is actually enabled.
    logger.debug("Creating class '%s' with bases %s and dictionary %s",
                 name, bases, dct)
    dct["builder"](flat_component, dct["weight_variables"],
                   hierarchical_mode=True)
    return type.__new__(cls, name, bases, dct)
def __new__(cls, name, bases, dct):
    """Create the class after flattening its combined 9ML model.

    Reads ``dct['combined_model']`` (a
    ``nineml.abstraction.ComponentClass``) and
    ``dct['weight_variables']`` (mapping of receptor name to weight
    variable), flattens the model, closes open reduce ports, and fills
    in the PyNN-facing class attributes (``default_parameters``,
    ``default_initial_values``, ``receptor_types``, ``units``,
    ``recordable``, ...) before invoking ``dct['builder']`` and
    delegating to ``type.__new__``.
    """
    import nineml.abstraction as al
    from nineml.abstraction.dynamics.utils import flattener, modifiers

    # Pull the required inputs out of the class dict.
    combined_model = dct['combined_model']
    weight_vars = dct['weight_variables']

    # Flatten the model unless it already is flat.
    assert isinstance(combined_model, al.ComponentClass)
    if combined_model.is_flat():
        flat_component = combined_model
    else:
        flat_component = flattener.flatten(combined_model, name)

    # Back-substitute aliases and equations in the flattened component.
    flat_component.backsub_all()

    # Close any open reduce ports.
    modifiers.DynamicPortModifier.close_all_reduce_ports(
        componentclass=flat_component)

    dct["combined_model"] = flat_component
    dct["default_parameters"] = {
        p.name: 1.0 for p in flat_component.parameters}
    dct["default_initial_values"] = {
        sv.name: 0.0 for sv in flat_component.state_variables}
    dct["receptor_types"] = list(weight_vars.keys())
    # BUG FIX: a list can never equal a tuple in Python, so the original
    # ``== ('excitatory', 'inhibitory')`` test was always False; compare
    # against a list instead.
    dct["standard_receptor_type"] = (
        dct["receptor_types"] == ['excitatory', 'inhibitory'])
    # how to determine this? neuron component has a receive analog port
    # with dimension current, that is not connected to a synapse port?
    dct["injectable"] = False
    # how to determine this? synapse component has a receive analog port
    # with dimension voltage?
    dct["conductance_based"] = True
    dct["model_name"] = name
    dct["units"] = {
        sv.name: _default_units[sv.dimension.name]
        for sv in flat_component.state_variables}

    # Recordable signals: analog ports, spikes/regime, aliases
    # (bindings) and state variables.
    dct["recordable"] = (
        [port.name for port in flat_component.analog_ports] +
        ['spikes', 'regime'] +
        [alias.lhs for alias in flat_component.aliases] +
        [sv.name for sv in flat_component.state_variables])
    # Use logging's lazy %-args so the dict is only stringified when
    # DEBUG is enabled.
    logger.debug("Creating class '%s' with bases %s and dictionary %s",
                 name, bases, dct)
    dct["builder"](flat_component, dct["weight_variables"],
                   hierarchical_mode=True)
    return type.__new__(cls, name, bases, dct)