class Parameters(ModulatoryProjection_Base.Parameters):
    """
    Attributes
    ----------

    value
        see `value <LearningProjection.value>`

        :default value: numpy.array([0])
        :type: numpy.ndarray
        :read only: True

    error_function
        see `error_function <LearningProjection.error_function>`

        :default value: `LinearCombination`(weights=numpy.array([[-1], [ 1]]))
        :type: `Function`

    function
        see `function <LearningProjection.function>`

        :default value: `Linear`
        :type: `Function`

    learning_function
        see `learning_function <LearningProjection.learning_function>`

        :default value: `BackPropagation`
        :type: `Function`

    learning_rate
        see `learning_rate <LearningProjection.learning_rate>`

        :default value: None
        :type:

    learning_signal
        see `learning_signal <LearningProjection.learning_signal>`

        :default value: None
        :type:
        :read only: True
    """
    # The Projection's value is the matrix of weight changes; it is read-only
    # and also reachable under the alias 'weight_change_matrix'.
    value = Parameter(np.array([0]), read_only=True, aliases=['weight_change_matrix'])
    # Transfer function applied to the learning signal; not stateful, not logged.
    function = Parameter(Linear, stateful=False, loggable=False)
    # NOTE(review): this default is a single LinearCombination instance created
    # once at class-definition time and therefore shared by every instance that
    # does not override it — confirm that sharing is intended (stateful=False
    # suggests it is).
    error_function = Parameter(LinearCombination(weights=[[-1], [1]]), stateful=False, loggable=False)
    # Class (not instance) of the learning function; instantiated elsewhere.
    learning_function = Parameter(BackPropagation, stateful=False, loggable=False)
    # modulable=True: learning_rate may be modulated at runtime.
    learning_rate = Parameter(None, modulable=True)
    # learning_signal is derived via module-level getter/setter helpers rather
    # than stored directly; read-only to callers.
    learning_signal = Parameter(None, read_only=True, getter=_learning_signal_getter, setter=_learning_signal_setter)
def __init__(
        self,
        sender: tc.optional(tc.any(LearningSignal, LearningMechanism)) = None,
        receiver: tc.optional(tc.any(ParameterPort, MappingProjection)) = None,
        error_function: tc.optional(is_function_type) = LinearCombination(weights=[[-1], [1]]),
        learning_function: tc.optional(is_function_type) = BackPropagation,
        # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
        # learning_signal_params:tc.optional(dict)=None,
        learning_rate: tc.optional(tc.any(parameter_spec)) = None,
        learning_enabled: tc.optional(tc.any(bool, tc.enum(ONLINE, AFTER))) = None,
        weight=None,
        exponent=None,
        params: tc.optional(dict) = None,
        name=None,
        prefs: is_pref_set = None,
        **kwargs):
    """Initialize a LearningProjection, deferring initialization when either
    sender or receiver has not yet been specified.

    IMPLEMENTATION NOTE:
        the error_function and learning_function arguments are implemented to preserve the ability to pass
        error function and learning function specifications from the specification of a LearningProjection (used
        to implement learning for a MappingProjection, e.g., in a tuple) to the LearningMechanism responsible
        for implementing the function; and for specifying the default LearningProjection for a Process.
    """
    # If receiver has not been assigned, defer init to Port.instantiate_projection_to_state()
    if sender is None or receiver is None:
        # Flag for deferred initialization
        self.initialization_status = ContextFlags.DEFERRED_INIT
        # parameters should be passed through methods like
        # instantiate_sender instead of grabbed from attributes like this
        self._learning_function = learning_function
        self._learning_rate = learning_rate
        self._error_function = error_function

    # replaces similar code in _instantiate_sender
    # NOTE(review): when the sender's owner specifies a learning_rate, it
    # overrides any learning_rate passed explicitly to this constructor —
    # confirm that precedence is intended.
    try:
        if sender.owner.learning_rate is not None:
            learning_rate = sender.owner.learning_rate
    except AttributeError:
        # sender is None, or has no owner / learning_rate attribute:
        # keep the argument value as given.
        pass

    super().__init__(
        sender=sender,
        receiver=receiver,
        weight=weight,
        exponent=exponent,
        params=params,
        name=name,
        prefs=prefs,
        error_function=error_function,
        learning_function=learning_function,
        learning_rate=learning_rate,
        learning_enabled=learning_enabled,
        **kwargs)
class Parameters(ObjectiveMechanism.Parameters): """ Attributes ---------- variable see `variable <ComparatorMechanism.variable>` :default value: numpy.array([[0], [0]]) :type: ``numpy.ndarray`` :read only: True function see `function <ComparatorMechanism.function>` :default value: `LinearCombination`(weights=numpy.array([[-1], [ 1]])) :type: `Function` output_ports see `output_ports <ComparatorMechanism.output_ports>` :default value: [`OUTCOME`] :type: ``list`` :read only: True sample see `sample <ComparatorMechanism.sample>` :default value: None :type: target see `target <ComparatorMechanism.target>` :default value: None :type: """ # By default, ComparatorMechanism compares two 1D np.array input_ports variable = Parameter(np.array([[0], [0]]), read_only=True, pnl_internal=True, constructor_argument='default_variable') function = Parameter(LinearCombination(weights=[[-1], [1]]), stateful=False, loggable=False) sample = None target = None output_ports = Parameter( [OUTCOME], stateful=False, loggable=False, read_only=True, structural=True, )
def __init__(
        self,
        sender: tc.optional(tc.any(LearningSignal, LearningMechanism)) = None,
        receiver: tc.optional(tc.any(ParameterPort, MappingProjection)) = None,
        error_function: tc.optional(is_function_type) = LinearCombination(weights=[[-1], [1]]),
        learning_function: tc.optional(is_function_type) = BackPropagation,
        # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
        # learning_signal_params:tc.optional(dict)=None,
        learning_rate: tc.optional(tc.any(parameter_spec)) = None,
        learning_enabled: tc.optional(tc.any(bool, tc.enum(ONLINE, AFTER))) = None,
        weight=None,
        exponent=None,
        params: tc.optional(dict) = None,
        name=None,
        prefs: is_pref_set = None,
        **kwargs):
    """Initialize a LearningProjection, packing constructor arguments into the
    params dict and deferring initialization when either sender or receiver
    has not yet been specified.

    IMPLEMENTATION NOTE:
        the error_function and learning_function arguments are implemented to preserve the ability to pass
        error function and learning function specifications from the specification of a LearningProjection (used
        to implement learning for a MappingProjection, e.g., in a tuple) to the LearningMechanism responsible
        for implementing the function; and for specifying the default LearningProjection for a Process.
    """
    # Assign args to params and functionParams dicts
    params = self._assign_args_to_param_dicts(
        error_function=error_function,
        learning_function=learning_function,
        learning_rate=learning_rate,
        # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
        # learning_signal_params=learning_signal_params,
        learning_enabled=learning_enabled,
        weight=weight,
        exponent=exponent,
        params=params)

    # If receiver has not been assigned, defer init to Port.instantiate_projection_to_state()
    if sender is None or receiver is None:
        # Flag for deferred initialization; super().__init__ is still called
        # below so base-class bookkeeping runs either way.
        self.initialization_status = ContextFlags.DEFERRED_INIT

    super().__init__(
        sender=sender,
        receiver=receiver,
        weight=weight,
        exponent=exponent,
        params=params,
        name=name,
        prefs=prefs,
        **kwargs)
def __init__(self,
             default_variable=None,
             sample: tc.optional(tc.any(OutputState, Mechanism_Base, dict, is_numeric, str)) = None,
             target: tc.optional(tc.any(OutputState, Mechanism_Base, dict, is_numeric, str)) = None,
             function=LinearCombination(weights=[[-1], [1]]),
             output_states: tc.optional(tc.any(str, Iterable)) = (OUTCOME, ),
             params=None,
             name=None,
             prefs: is_pref_set = None,
             **input_states  # IMPLEMENTATION NOTE: this is for backward compatibility
             ):
    """Initialize a ComparatorMechanism that monitors its SAMPLE and TARGET
    InputStates and computes their disparity via `function` (by default
    LinearCombination with weights [[-1], [1]], i.e. target - sample).

    sample/target may be given as OutputStates, Mechanisms, dicts, numeric
    values, or names; they are merged with any legacy **input_states
    specification. output_states may be a single state name (str), a tuple,
    or any iterable of state specifications.
    """
    input_states = self._merge_legacy_constructor_args(sample, target, default_variable, input_states)

    # Default output_states is specified in constructor as a tuple rather than a list
    # to avoid "gotcha" associated with mutable default arguments
    # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
    #
    # BUG FIX: a bare str must be wrapped in a one-element list; passing it to
    # list() would split a single state name into individual characters.
    if isinstance(output_states, str):
        output_states = [output_states]
    elif isinstance(output_states, tuple):
        output_states = list(output_states)

    # IMPLEMENTATION NOTE: The following prevents the default from being updated by subsequent assignment
    #                      (in this case, to [OUTCOME, {NAME= MSE}]), but fails to expose default in IDE
    # output_states = output_states or [OUTCOME, MSE]

    # Create a StandardOutputStates object from the list of stand_output_states specified for the class
    if not isinstance(self.standard_output_states, StandardOutputStates):
        self.standard_output_states = StandardOutputStates(self,
                                                           self.standard_output_states,
                                                           indices=PRIMARY)

    super().__init__(
        # monitor=[sample, target],
        monitor=input_states,
        function=function,
        # copy so the caller's list (or the class default) is not mutated by a
        # later assignment; guard None, which the tc.optional annotation allows
        # but which has no .copy()
        output_states=None if output_states is None else output_states.copy(),
        params=params,
        name=name,
        prefs=prefs,
        context=ContextFlags.CONSTRUCTOR)

    # Require Projection to TARGET InputState (already required for SAMPLE as primary InputState)
    self.input_states[1].parameters.require_projection_in_composition.set(True, override=True)
def __init__(self,
             default_variable=None,
             sample: tc.optional(tc.any(OutputPort, Mechanism_Base, dict, is_numeric, str)) = None,
             target: tc.optional(tc.any(OutputPort, Mechanism_Base, dict, is_numeric, str)) = None,
             function=LinearCombination(weights=[[-1], [1]]),
             output_ports: tc.optional(tc.any(str, Iterable)) = None,
             params=None,
             name=None,
             prefs: is_pref_set = None,
             **kwargs):
    """Initialize a ComparatorMechanism that monitors its SAMPLE and TARGET
    InputPorts and computes their disparity via `function` (by default
    LinearCombination with weights [[-1], [1]], i.e. target - sample).

    sample/target may be given as OutputPorts, Mechanisms, dicts, numeric
    values, or names; a legacy INPUT_PORTS entry in **kwargs is also accepted
    and merged. output_ports may be a single port name (str), a tuple, or any
    iterable of port specifications; None keeps the class default.
    """
    # Pull any legacy INPUT_PORTS specification out of kwargs and merge it
    # with the sample/target arguments.
    input_ports = kwargs.pop(INPUT_PORTS, {})
    if input_ports:
        input_ports = {INPUT_PORTS: input_ports}

    input_ports = self._merge_legacy_constructor_args(sample, target, default_variable, input_ports)

    # Default output_ports is specified in constructor as a tuple rather than a list
    # to avoid "gotcha" associated with mutable default arguments
    # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
    #
    # BUG FIX: a bare str must be wrapped in a one-element list; passing it to
    # list() would split a single port name into individual characters.
    if isinstance(output_ports, str):
        output_ports = [output_ports]
    elif isinstance(output_ports, tuple):
        output_ports = list(output_ports)

    # IMPLEMENTATION NOTE: The following prevents the default from being updated by subsequent assignment
    #                      (in this case, to [OUTCOME, {NAME= MSE}]), but fails to expose default in IDE
    # output_ports = output_ports or [OUTCOME, MSE]

    super().__init__(
        monitor=input_ports,
        function=function,
        output_ports=output_ports,  # prevent default from getting overwritten by later assign
        params=params,
        name=name,
        prefs=prefs,
        **kwargs)

    # Require Projection to TARGET InputPort (already required for SAMPLE as primary InputPort)
    self.input_ports[1].parameters.require_projection_in_composition._set(True, Context())
class Parameters(ObjectiveMechanism.Parameters): """ Attributes ---------- variable see `variable <ComparatorMechanism.variable>` :default value: numpy.array([[0], [0]]) :type: numpy.ndarray :read only: True function see `function <ComparatorMechanism.function>` :default value: `LinearCombination`(offset=0.0, operation=sum, scale=1.0, weights=numpy.array([[-1], [ 1]])) :type: `Function` sample see `sample <ComparatorMechanism.sample>` :default value: None :type: target see `target <ComparatorMechanism.target>` :default value: None :type: """ # By default, ComparatorMechanism compares two 1D np.array input_states variable = Parameter(np.array([[0], [0]]), read_only=True) function = Parameter(LinearCombination(weights=[[-1], [1]]), stateful=False, loggable=False) sample = None target = None