Example 1
class EVCAuxiliaryFunction(Function_Base):
    """Base class for EVC auxiliary functions
    """
    componentType = kwEVCAuxFunctionType

    class ClassDefaults(Function_Base.ClassDefaults):
        variable = None

    paramClassDefaults = Function_Base.paramClassDefaults.copy()
    paramClassDefaults.update({
        FUNCTION_OUTPUT_TYPE_CONVERSION: False,
        PARAMETER_STATE_PARAMS: None
    })

    classPreferences = {
        kwPreferenceSetName:
        'ValueFunctionCustomClassPreferences',
        kpReportOutputPref:
        PreferenceEntry(False, PreferenceLevel.INSTANCE),
        kpRuntimeParamStickyAssignmentPref:
        PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    @tc.typecheck
    def __init__(self,
                 function,
                 variable=None,
                 params=None,
                 owner=None,
                 prefs: is_pref_set = None,
                 context=None):

        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(params=params)
        self.aux_function = function

        super().__init__(
            default_variable=variable,
            params=params,
            owner=owner,
            prefs=prefs,
            context=context,
            function=function,
        )

        self.functionOutputType = None
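
The constructor pattern above — store the callable in `aux_function`, route the arguments through `_assign_args_to_param_dicts`, and defer the rest to `Function_Base.__init__` — is what concrete EVC auxiliary functions are expected to follow. A minimal, hypothetical sketch of such a subclass (the class name, `componentName` string, and the body of `function` are illustrative only, not part of PsyNeuLink):

class ToyValueFunction(EVCAuxiliaryFunction):
    """Illustrative auxiliary function: combines an outcome and a cost into a single value."""

    componentName = 'TOY_VALUE_FUNCTION'

    def __init__(self, variable=None, params=None, owner=None, prefs=None):
        # Hand the bound callable to the base class, which stores it in aux_function
        # and routes variable/params/owner/prefs through Function_Base.__init__
        super().__init__(function=self.function,
                         variable=variable,
                         params=params,
                         owner=owner,
                         prefs=prefs)

    def function(self, variable=None, params=None, context=None):
        # variable is assumed to be a 2-item array: [outcome, cost]
        outcome, cost = variable
        return outcome - cost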
Example 2
    def matrix(self, matrix):
        if not (isinstance(matrix, np.matrix) or
                    (isinstance(matrix,np.ndarray) and matrix.ndim == 2) or
                    (isinstance(matrix,list) and np.array(matrix).ndim == 2)):
            raise MappingError("Matrix parameter for {} ({}) MappingProjection must be "
                               "an np.matrix, a 2d np.array, or a correspondingly configured list".
                               format(self.name, matrix))

        matrix = np.array(matrix)

        # FIX: Hack to prevent recursion in calls to setter and assign_params
        self.function.__self__.paramValidationPref = PreferenceEntry(False, PreferenceLevel.INSTANCE)

        self.function_object.matrix = matrix

        if hasattr(self, "_parameter_states"):
            self.parameter_states["matrix"].function_object.previous_value = matrix
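
The setter above accepts a matrix specification only if it is an np.matrix, a 2d np.ndarray, or a list that converts to a 2d array. That acceptance rule can be exercised in isolation; a self-contained sketch using plain numpy (is_valid_matrix_spec is a hypothetical helper name, not part of PsyNeuLink):

import numpy as np

def is_valid_matrix_spec(matrix):
    """Return True for an np.matrix, a 2d np.ndarray, or a list that forms a 2d array."""
    return (isinstance(matrix, np.matrix)
            or (isinstance(matrix, np.ndarray) and matrix.ndim == 2)
            or (isinstance(matrix, list) and np.array(matrix).ndim == 2))

assert is_valid_matrix_spec(np.eye(3))          # 2d ndarray: accepted
assert is_valid_matrix_spec([[1, 0], [0, 1]])   # nested list forming a 2d array: accepted
assert not is_valid_matrix_spec([1, 2, 3])      # 1d list: rejected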
Example 3
class ComparatorMechanism(ObjectiveMechanism):
    """
    ComparatorMechanism(                                \
        sample,                                         \
        target,                                         \
        input_states=[SAMPLE,TARGET],                   \
        function=LinearCombination(weights=[[-1],[1]]), \
        output_states=OUTCOME,                          \
        params=None,                                    \
        name=None,                                      \
        prefs=None)

    Subclass of `ObjectiveMechanism` that compares the values of two `OutputStates <OutputState>`.

    COMMENT:
        Description:
            ComparatorMechanism is a subtype of the ObjectiveMechanism Subtype of the ProcessingMechanism Type
            of the Mechanism Category of the Component class.
            By default, its function uses the LinearCombination Function to compare two input variables.
            COMPARISON_OPERATION (functionParams) determines whether the comparison is subtractive or divisive.
            The function returns an array with the Hadamard (element-wise) difference/quotient of target vs. sample,
                as well as the mean, sum, sum of squares, and mean sum of squares of the comparison array

        Class attributes:
            + componentType (str): ComparatorMechanism
            + classPreference (PreferenceSet): Comparator_PreferenceSet, instantiated in __init__()
            + classPreferenceLevel (PreferenceLevel): PreferenceLevel.SUBTYPE
            + ClassDefaults.variable (value):  Comparator_DEFAULT_STARTING_POINT // QUESTION: What to change here
            + paramClassDefaults (dict): {FUNCTION_PARAMS:{COMPARISON_OPERATION: SUBTRACTION}}

        Class methods:
            None

        MechanismRegistry:
            All instances of ComparatorMechanism are registered in MechanismRegistry, which maintains an
              entry for the subclass, a count for all instances of it, and a dictionary of those instances
    COMMENT

    Arguments
    ---------

    sample : OutputState, Mechanism, value, or string
        specifies the value to compare with the `target` by the `function <ComparatorMechanism.function>`.

    target :  OutputState, Mechanism, value, or string
        specifies the value with which the `sample` is compared by the `function <ComparatorMechanism.function>`.

    input_states :  List[InputState, value, str or dict] or Dict[] : default [SAMPLE, TARGET]
        specifies the names and/or formats to use for the values of the sample and target InputStates;
        by default they are named *SAMPLE* and *TARGET*, and their formats match the values of the OutputStates
        specified in the **sample** and **target** arguments, respectively (see `ComparatorMechanism_Structure`
        for additional details).

    function :  Function, function or method : default Distance(metric=DIFFERENCE)
        specifies the `function <ComparatorMechanism.function>` used to compare the `sample` with the `target`.

    output_states :  List[OutputState, value, str or dict] or Dict[] : default [OUTCOME]
        specifies the OutputStates for the Mechanism;

    params :  Optional[Dict[param keyword: param value]]
        a `parameter dictionary <ParameterState_Specification>` that can be used to specify the parameters for
        the Mechanism, its function, and/or a custom function and its parameters. Values specified for parameters in
        the dictionary override any assigned to those parameters in arguments of the
        constructor.

    name : str : default see `name <ComparatorMechanism.name>`
        specifies the name of the ComparatorMechanism.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the ComparatorMechanism; see `prefs <ComparatorMechanism.prefs>` for details.


    Attributes
    ----------

    COMMENT:
    default_variable : Optional[List[array] or 2d np.array]
    COMMENT

    sample : OutputState
        determines the value to compare with the `target` by the `function <ComparatorMechanism.function>`.

    target : OutputState
        determines the value with which `sample` is compared by the `function <ComparatorMechanism.function>`.

    input_states : ContentAddressableList[InputState, InputState]
        contains the two InputStates named, by default, *SAMPLE* and *TARGET*, each of which receives a
        `MappingProjection` from the OutputStates referenced by the `sample` and `target` attributes
        (see `ComparatorMechanism_Structure` for additional details).

    function : CombinationFunction, function or method
        used to compare the `sample` with the `target`.  It can be any PsyNeuLink `CombinationFunction`,
        or a python function that takes a 2d array with two items and returns a 1d array of the same length
        as the two input items.

    value : 1d np.array
        the result of the comparison carried out by the `function <ComparatorMechanism.function>`.

    output_state : OutputState
        contains the `primary <OutputState_Primary>` OutputState of the ComparatorMechanism; the default is
        its *OUTCOME* OutputState, the value of which is equal to the `value <ComparatorMechanism.value>`
        attribute of the ComparatorMechanism.

    output_states : ContentAddressableList[OutputState]
        contains, by default, only the *OUTCOME* (primary) OutputState of the ComparatorMechanism.

    output_values : 2d np.array
        contains one item that is the value of the *OUTCOME* OutputState.

    name : str
        the name of the ComparatorMechanism; if it is not specified in the **name** argument of the constructor, a
        default is assigned by MechanismRegistry (see `Naming` for conventions used for default and duplicate names).

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the ComparatorMechanism; if it is not specified in the **prefs** argument of the
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet
        <LINK>` for details).


    """
    componentType = COMPARATOR_MECHANISM

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName: 'ComparatorCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    class ClassDefaults(ObjectiveMechanism.ClassDefaults):
        # By default, ComparatorMechanism compares two 1D np.array input_states
        variable = np.array([[0], [0]])
        function = LinearCombination(weights=[[-1], [1]])

    # ComparatorMechanism parameter and control signal assignments):
    paramClassDefaults = Mechanism_Base.paramClassDefaults.copy()

    standard_output_states = ObjectiveMechanism.standard_output_states.copy()

    standard_output_states.extend([{
        NAME: SSE,
        FUNCTION: lambda x: np.sum(x * x)
    }, {
        NAME: MSE,
        FUNCTION: lambda x: np.sum(x * x) / len(x)
    }])

    @tc.typecheck
    def __init__(
        self,
        default_variable=None,
        sample: tc.optional(
            tc.any(OutputState, Mechanism_Base, dict, is_numeric, str)) = None,
        target: tc.optional(
            tc.any(OutputState, Mechanism_Base, dict, is_numeric, str)) = None,
        function=LinearCombination(weights=[[-1], [1]]),
        output_states: tc.optional(tc.any(str, Iterable)) = (OUTCOME, MSE),
        params=None,
        name=None,
        prefs: is_pref_set = None,
        **input_states  # IMPLEMENTATION NOTE: this is for backward compatibility
    ):

        input_states = self._merge_legacy_constructor_args(
            sample, target, default_variable, input_states)

        # Default output_states is specified in constructor as a tuple rather than a list
        # to avoid "gotcha" associated with mutable default arguments
        # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
        if isinstance(output_states, (str, tuple)):
            output_states = list(output_states)

        # IMPLEMENTATION NOTE: The following prevents the default from being updated by subsequent assignment
        #                     (in this case, to [OUTCOME, {NAME= MSE}]), but fails to expose default in IDE
        # output_states = output_states or [OUTCOME, MSE]

        # Create a StandardOutputStates object from the list of standard_output_states specified for the class
        if not isinstance(self.standard_output_states, StandardOutputStates):
            self.standard_output_states = StandardOutputStates(
                self, self.standard_output_states, indices=PRIMARY)

        super().__init__(  # monitored_output_states=[sample, target],
            monitored_output_states=input_states,
            function=function,
            output_states=output_states.copy(
            ),  # prevent default from getting overwritten by later assign
            params=params,
            name=name,
            prefs=prefs,
            context=ContextFlags.CONSTRUCTOR)

    def _validate_params(self, request_set, target_set=None, context=None):
        """If sample and target values are specified, validate that they are compatible
        """

        if INPUT_STATES in request_set and request_set[
                INPUT_STATES] is not None:
            input_states = request_set[INPUT_STATES]

            # Validate that there are exactly two input_states (for sample and target)
            num_input_states = len(input_states)
            if num_input_states != 2:
                raise ComparatorMechanismError(
                    "{} arg is specified for {} ({}), so it must have exactly 2 items, "
                    "one each for {} and {}".format(INPUT_STATES,
                                                    self.__class__.__name__,
                                                    len(input_states), SAMPLE,
                                                    TARGET))

            # Validate that input_states are specified as dicts
            if not all(
                    isinstance(input_state, dict)
                    for input_state in input_states):
                raise ComparatorMechanismError(
                    "PROGRAM ERROR: all items in input_state args must be converted to dicts"
                    " by calling State._parse_state_spec() before calling super().__init__"
                )

            # Validate length of variable for sample = target
            if VARIABLE in input_states[0]:
                # input_states arg specified in standard state specification dict format
                lengths = [
                    len(input_state[VARIABLE]) for input_state in input_states
                ]
            else:
                # input_states arg specified in {<STATE_NAME>:<STATE SPECIFICATION DICT>} format
                lengths = [
                    len(list(input_state_dict.values())[0][VARIABLE])
                    for input_state_dict in input_states
                ]

            if lengths[0] != lengths[1]:
                raise ComparatorMechanismError(
                    "Length of value specified for {} InputState of {} ({}) must be "
                    "same as length of value specified for {} ({})".format(
                        SAMPLE, self.__class__.__name__, lengths[0], TARGET,
                        lengths[1]))

        elif SAMPLE in request_set and TARGET in request_set:

            sample = request_set[SAMPLE]
            if isinstance(sample, InputState):
                sample_value = sample.value
            elif isinstance(sample, Mechanism):
                sample_value = sample.input_value[0]
            elif is_value_spec(sample):
                sample_value = sample
            else:
                sample_value = None

            target = request_set[TARGET]
            if isinstance(target, InputState):
                target_value = target.value
            elif isinstance(target, Mechanism):
                target_value = target.input_value[0]
            elif is_value_spec(target):
                target_value = target
            else:
                target_value = None

            if sample_value is not None and target_value is not None:
                if not iscompatible(
                        sample_value, target_value, **{
                            kwCompatibilityLength: True,
                            kwCompatibilityNumeric: True
                        }):
                    raise ComparatorMechanismError(
                        "The length of the sample ({}) must be the same as for the target ({}) "
                        "for {} {}".format(len(sample_value), len(target_value),
                                           self.__class__.__name__, self.name))

        super()._validate_params(request_set=request_set,
                                 target_set=target_set,
                                 context=context)

    def _merge_legacy_constructor_args(self,
                                       sample,
                                       target,
                                       default_variable=None,
                                       input_states=None):

        # USE sample and target TO CREATE AN InputState specification dictionary for each;
        # DO SAME FOR InputStates argument, USE TO OVERWRITE ANY SPECIFICATIONS IN sample AND target DICTS
        # TRY tuple format AS WAY OF PROVIDING CONSOLIDATED variable AND OutputState specifications

        sample_dict = _parse_state_spec(owner=self,
                                        state_type=InputState,
                                        state_spec=sample,
                                        name=SAMPLE)

        target_dict = _parse_state_spec(owner=self,
                                        state_type=InputState,
                                        state_spec=target,
                                        name=TARGET)

        # If either the default_variable arg or the input_states arg is provided:
        #    - validate that there are exactly two items in default_variable or input_states list
        #    - if there is an input_states list, parse it and use it to update sample and target dicts
        if input_states:
            input_states = input_states[INPUT_STATES]
            # print("type input_states = {}".format(type(input_states)))
            if not isinstance(input_states, list):
                raise ComparatorMechanismError(
                    "If an \'{}\' argument is included in the constructor for a {} "
                    "it must be a list with two {} specifications.".format(
                        INPUT_STATES, ComparatorMechanism.__name__,
                        InputState.__name__))

        input_states = input_states or default_variable

        if input_states is not None:
            if len(input_states) != 2:
                raise ComparatorMechanismError(
                    "If an \'input_states\' arg is "
                    "included in the constructor for "
                    "a {}, it must be a list with "
                    "exactly two items (not {})".format(
                        ComparatorMechanism.__name__, len(input_states)))

            sample_input_state_dict = _parse_state_spec(
                owner=self,
                state_type=InputState,
                state_spec=input_states[0],
                name=SAMPLE,
                value=None)

            target_input_state_dict = _parse_state_spec(
                owner=self,
                state_type=InputState,
                state_spec=input_states[1],
                name=TARGET,
                value=None)

            sample_dict = recursive_update(sample_dict,
                                           sample_input_state_dict)
            target_dict = recursive_update(target_dict,
                                           target_input_state_dict)

        return [sample_dict, target_dict]
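
As documented above, the default `function` is `LinearCombination(weights=[[-1], [1]])`, which weights the *SAMPLE* input by -1 and the *TARGET* input by +1, so the *OUTCOME* value reduces to the element-wise difference target - sample; the *SSE* and *MSE* standard OutputStates defined above then summarize that comparison array. A self-contained numpy sketch of those computations (the sample and target values are made up for illustration):

import numpy as np

sample = np.array([1.0, 2.0, 3.0])
target = np.array([1.5, 2.0, 2.0])

# LinearCombination(weights=[[-1], [1]]) over [sample, target] amounts to target - sample
outcome = np.sum(np.array([sample, target]) * np.array([[-1.0], [1.0]]), axis=0)

sse = np.sum(outcome * outcome)                 # same form as the SSE standard OutputState
mse = np.sum(outcome * outcome) / len(outcome)  # same form as the MSE standard OutputState

print(outcome)   # [ 0.5  0.  -1. ]
print(sse, mse)  # 1.25 0.4166666666666667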
Example 4
class PredictionErrorMechanism(ComparatorMechanism):
    """
    PredictionErrorMechanism(                                \
        sample,                                              \
        target,                                              \
        function=PredictionErrorDeltaFunction,               \
        output_states=[OUTCOME],                             \
        params=None,                                         \
        name=None,                                           \
        prefs=None)

    Calculates the prediction error between the predicted reward and the target

    Arguments
    ---------

    sample : OutputState, Mechanism_Base, dict, number, or str
        specifies the SAMPLE InputState, which will be evaluated by
        the function

    target : OutputState, Mechanism_Base, dict, number, or str
        specifies the TARGET InputState, which will be used by the function to
        evaluate the sample

    function : CombinationFunction, ObjectiveFunction, function, or method : default PredictionErrorDeltaFunction
        the function used to evaluate the sample and target inputs.

    output_states : str, Iterable : default OUTCOME
        by default, contains only the *OUTCOME* (`primary <OutputState_Primary>`)
        OutputState of the PredictionErrorMechanism.

    learning_rate : Number : default 0.3
        controls the weight of later timesteps compared to earlier ones. Higher
        rates weight later timesteps more heavily than previous ones.

    name : str
        the name of the PredictionErrorMechanism; if it is not specified in the
        **name** argument of the constructor, a default is assigned by
        MechanismRegistry (see `Naming` for conventions used for default and
        duplicate names).


    Attributes
    ----------

    sample : OutputState, Mechanism_Base, dict, number, or str
        the SAMPLE InputState, the value of which is evaluated by
        the function

    target : OutputState, Mechanism_Base, dict, number, or str
        the TARGET InputState, the value of which is used by the function to
        evaluate the sample

    function : CombinationFunction, ObjectiveFunction, Function, or method : default PredictionErrorDeltaFunction
        the function used to evaluate the sample and target inputs.

    output_states : str, Iterable : default OUTCOME
        by default, contains only the *OUTCOME* (`primary <OutputState_Primary>`)
        OutputState of the PredictionErrorMechanism.

    learning_rate : Number : default 0.3
        controls the weight of later timesteps compared to earlier ones. Higher
        rates weight later timesteps more heavily than previous ones.

    name : str
        the name of the PredictionErrorMechanism; if it is not specified in the
        **name** argument of the constructor, a default is assigned by
        MechanismRegistry (see `Naming` for conventions used for default and
        duplicate names).

    """
    componentType = PREDICTION_ERROR_MECHANISM

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    classPreferences = {
        kwPreferenceSetName: 'PredictionErrorMechanismCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    class ClassDefaults(ComparatorMechanism.ClassDefaults):
        variable = None

    paramClassDefaults = ComparatorMechanism.paramClassDefaults.copy()
    standard_output_states = ComparatorMechanism.standard_output_states.copy()

    @tc.typecheck
    def __init__(self,
                 sample: tc.optional(tc.any(OutputState, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 target: tc.optional(tc.any(OutputState, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 function=PredictionErrorDeltaFunction(),
                 output_states: tc.optional(tc.any(str, Iterable)) = OUTCOME,
                 learning_rate: is_numeric = 0.3,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=componentType + INITIALIZING):
        input_states = [sample, target]
        params = self._assign_args_to_param_dicts(sample=sample,
                                                  target=target,
                                                  function=function,
                                                  input_states=input_states,
                                                  output_states=output_states,
                                                  learning_rate=learning_rate,
                                                  params=params)

        super().__init__(sample=sample,
                         target=target,
                         input_states=input_states,
                         function=function,
                         output_states=output_states,
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=context)

    def _parse_function_variable(self, variable):
        # TODO: update to take sample/reward from variable
        # sample = x(t) in Montague on first run, V(t) on subsequent runs
        sample = self.input_states[SAMPLE].value
        reward = self.input_states[TARGET].value

        return [sample, reward]

    def _execute(self, variable=None, function_variable=None, runtime_params=None, context=None):
        delta = super()._execute(variable=variable, function_variable=function_variable, runtime_params=runtime_params, context=context)
        delta = delta[1:]
        delta = np.append(delta, 0)

        return delta
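
The `_execute` override above shifts the result one timestep earlier: it drops `delta[0]` and pads a trailing 0, so position t of the returned array holds the delta the underlying `PredictionErrorDeltaFunction` computed for t + 1. A small numpy illustration of just that shift (the delta values are invented for the example):

import numpy as np

delta = np.array([0.0, 0.2, -0.1, 0.5])   # hypothetical per-timestep prediction errors

shifted = np.append(delta[1:], 0)          # same manipulation as in _execute above
print(shifted)                             # [ 0.2 -0.1  0.5  0. ]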
Example 5
from psyneulink.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel
from psyneulink.globals.utilities import Modulation

__all__ = [
    'MechanismPreferenceSet', 'runtimeParamModulationPrefCategoryDefault', 'runtimeParamModulationPrefInstanceDefault',
    'runtimeParamModulationPrefTypeDefault', 'runtimeParamStickyAssignmentPrefCategoryDefault',
    'runtimeParamStickyAssignmentPrefInstanceDefault', 'runtimeParamStickyAssignmentPrefTypeDefault',
]

# MODIFIED 11/29/16 OLD:
# # Keypaths for preferences:
# kpRuntimeParamModulationPref = '_runtime_param_modulation_pref'
# MODIFIED 11/29/16 END

# Default PreferenceSets:
runtimeParamModulationPrefInstanceDefault = PreferenceEntry(Modulation.OVERRIDE, PreferenceLevel.INSTANCE)
runtimeParamModulationPrefTypeDefault = PreferenceEntry(Modulation.ADD, PreferenceLevel.TYPE)
# runtimeParamModulationPrefCategoryDefault = PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.CATEGORY)
runtimeParamModulationPrefCategoryDefault = PreferenceEntry(False, PreferenceLevel.CATEGORY)

runtimeParamStickyAssignmentPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)
runtimeParamStickyAssignmentPrefTypeDefault = PreferenceEntry(False, PreferenceLevel.TYPE)
runtimeParamStickyAssignmentPrefCategoryDefault = PreferenceEntry(False, PreferenceLevel.CATEGORY)

reportOutputPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)
logPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)
verbosePrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)
paramValidationPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)


class MechanismPreferenceSet(ComponentPreferenceSet):
Example 6
class ComponentPreferenceSet(PreferenceSet):
    # DOCUMENT: FOR EACH pref TO BE ACCESSIBLE DIRECTLY AS AN ATTRIBUTE OF AN OBJECT,
    #           MUST IMPLEMENT IT AS PROPERTY (WITH GETTER AND SETTER METHODS) IN FUNCTION MODULE
    """Implement and manage PreferenceSets for Component class hierarchy

    Description:
        Implement the following preferences:
            - verbose (bool): enables/disables reporting of (non-exception) warnings and system function
            - paramValidation (bool):  enables/disables run-time validation of the execute method of a Function object
            - reportOutput (bool): enables/disables reporting of execution of execute method
            - log (bool): enables/disables logging for a given object
            - functionRunTimeParams (Modulation): uses run-time params to modulate execute method params
        Implement the following preference levels:
            - SYSTEM: System level default settings (Function.classPreferences)
            - CATEGORY: category-level default settings:
                Mechanism.classPreferences
                State.classPreferences
                Projection.classPreferences
                Function.classPreferences
            - TYPE: type-level default settings (if one exists for the category, else category-level settings are used):
                MechanismTypes:
                    ControlMechanism.classPreferences
                    ProcessingMechanism.classPreferences
                State types:
                    InputState.classPreferences
                    ParameterState.classPreferences
                    OutputState.classPreferences
                Projection types:
                    ControlProjection.classPreferences
                    MappingProjection.classPreferences
            - SUBTYPE: subtype-level default settings (if one exists for the type, else type-level settings are used):
                ControlMechanism subtypes:
                    DefaultControlMechanism.classPreferences
                    EVCControlMechanism.classPreferences
                ProcessingMechanism subtypes:
                    DDM.classPreferences
                    Linear.classPreferences
                    SigmoidLayer.classPreferences
                    IntegratorMechanism.classPreferences
            - INSTANCE: returns the setting specified in the PreferenceSetEntry of the specified object itself

    Initialization arguments:
        - owner (Function object): object to which the PreferenceSet belongs;  (default: DefaultProcessingMechanism)
            Note:  this is used to get appropriate default preferences (from class) for instantiation;
                   however, since a PreferenceSet can be assigned to multiple objects, when accessing the preference
                   the owner is set dynamically, to ensure context-relevant PreferenceLevels for returning the setting
        - prefs (dict):  a specification dict, each entry of which must have a:
            key that is a keypath (kp<*>) corresponding to an attribute of the PreferenceSet, from the following set:
                + kwPreferenceSetName: specifies the name of the PreferenceSet
                + kpVerbosePref: print non-exception-related information during execution
                + kpParamValidationPref: validate parameters during execution
                + kpReportOutputPref: report object's output during execution
                + kpLogPref: record attribute data for the object during execution
                + kpRuntimeParamModulationPref: modulate parameters using runtime specification (in pathway)
                + kpRuntimeParamStickyAssignmentPref: assignments remain in effect until replaced
            value that is either a PreferenceSet, valid setting for the preference, or a PreferenceLevel; defaults
        - level (PreferenceLevel): ??
        - name (str): name of PreferenceSet
        - context (value): must be self (to call super's abstract class: PreferenceSet)
        - **kargs (dict): dictionary of arguments, that takes precedence over the individual args above

    Class attributes:
        + defaultPreferencesDict (PreferenceSet): SystemDefaultPreferences
        + baseClass (class): Function

    Class methods:
        Note:
        * All of the setters below use PreferenceSet.set_preference, which validates any preference info passed to it,
            and can take a PreferenceEntry, setting, or PreferenceLevel
        - verbosePref():
            returns setting for verbosePref preference at level specified in verbosePref PreferenceEntry of
             owner's PreferenceSet
        - verbosePref(setting=<value>):
            assigns the value of the setting arg to the verbosePref of the owner's PreferenceSet
        - paramValidationPref():
            returns setting for paramValidationPref preference at level specified in paramValidationPref PreferenceEntry
            of owner's PreferenceSet
        - paramValidationPref(setting=<value>):
            assigns the value of the setting arg to the paramValidationPref of the owner's PreferenceSet
        - reportOutputPref():
            returns setting for reportOutputPref preference at level specified in reportOutputPref PreferenceEntry
            of owner's Preference object
        - reportOutputPref(setting=<value>):
            assigns the value of the setting arg to the reportOutputPref of the owner's PreferenceSet
        - logPref():
            returns setting for log preference at level specified in log PreferenceEntry of owner's Preference object
        - logPref(setting=<value>):
            assigns the value of the setting arg to the logPref of the owner's PreferenceSet
                and, if it contains log entries, it adds them to the owner's log
        - runtimeParamModulationPref():
            returns setting for runtimeParamModulation preference at level specified in
             runtimeParamModulation PreferenceEntry of owner's Preference object
        - runtimeParamModulationPref(setting=<value>):
            assigns the value of the setting arg to the runtimeParamModulationPref of the owner's Preference object
        - runtimeParamStickyAssignmentPref():
            returns setting for runtimeParamStickyAssignment preference at level specified in
             runtimeParamStickyAssignment PreferenceEntry of owner's Preference object
        - runtimeParamStickyAssignmentPref(setting=<value>):
            assigns value of the setting arg to the runtimeParamStickyAssignmentPref of the owner's Preference object
    """

    # Use this as both:
    # - a template for the type of each preference (used for validation)
    # - a default set of preferences (where defaults are not otherwise specified)
    defaultPreferencesDict = {
        kwPreferenceSetName:
        'ComponentPreferenceSetDefaults',
        kpVerbosePref:
        PreferenceEntry(False, PreferenceLevel.SYSTEM),
        kpParamValidationPref:
        PreferenceEntry(True, PreferenceLevel.SYSTEM),
        kpReportOutputPref:
        PreferenceEntry(True, PreferenceLevel.SYSTEM),
        kpLogPref:
        PreferenceEntry(LogLevel.OFF, PreferenceLevel.CATEGORY),
        kpRuntimeParamModulationPref:
        PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.SYSTEM),
        kpRuntimeParamStickyAssignmentPref:
        PreferenceEntry(False, PreferenceLevel.SYSTEM)
    }

    baseClass = None

    def __init__(self,
                 owner=None,
                 prefs=None,
                 level=PreferenceLevel.SYSTEM,
                 name=None,
                 context=None,
                 **kargs):
        """Instantiate PreferenceSet for owner and/or classPreferences for owner's class

        If owner is a class, instantiate its classPreferences attribute if that does not already exist,
            using its classPreferenceLevel attribute, and the corresponding preference dict in ComponentDefaultPrefDicts
        If owner is an object:
        - if the owner's classPreferences do not yet exist, instantiate it (as described above)
        - use the owner's <class>.classPreferenceLevel to create a base set of preferences from its classPreferences
        - use PreferenceEntries, settings, or level specifications from dict in prefs arg to replace entries in base set
        If owner is omitted:
        - assigns DefaultProcessingMechanism as owner (this is updated if PreferenceSet is assigned to another object)

        :param owner:
        :param prefs:
        :param level:
        :param name:
        :param context:
        :param kargs:
        """
        if kargs:
            try:
                owner = kargs[kwPrefsOwner]
            except (KeyError, NameError):
                pass
            try:
                prefs = kargs[kwPrefs]
            except (KeyError, NameError):
                pass
            try:
                name = kargs[NAME]
            except (KeyError, NameError):
                pass
            try:
                level = kargs[kwPrefLevel]
            except (KeyError, NameError):
                pass

        # If baseClass has not been assigned, do so here:
        if self.baseClass is None:
            from psyneulink.components.component import Component
            self.baseClass = Component

        # If owner is not specified, assign DefaultProcessingMechanism_Base as default owner
        if owner is None:
            from psyneulink.components.mechanisms.processing.defaultprocessingmechanism import DefaultProcessingMechanism_Base
            DefaultPreferenceSetOwner = DefaultProcessingMechanism_Base(
                name=kwDefaultPreferenceSetOwner)
            owner = DefaultPreferenceSetOwner

        # Get class
        if inspect.isclass(owner):
            owner_class = owner
        else:
            owner_class = owner.__class__

        # If classPreferences have not been instantiated for owner's class, do so here:
        try:
            # If classPreferences are still a dict, they need to be instantiated as a ComponentPreferenceSet
            if isinstance(owner_class.classPreferences, dict):
                raise AttributeError
        except AttributeError:
            super(ComponentPreferenceSet,
                  self).__init__(owner=owner_class,
                                 level=owner_class.classPreferenceLevel,
                                 prefs=ComponentDefaultPrefDicts[
                                     owner_class.classPreferenceLevel],
                                 name=name,
                                 context=self)

        # Instantiate PreferenceSet
        super(ComponentPreferenceSet,
              self).__init__(owner=owner,
                             level=owner_class.classPreferenceLevel,
                             prefs=prefs,
                             name=name,
                             context=self)
        # FIX:  NECESSARY?? 5/30/16
        self._level = level

    #region verbose entry ----------------------------------------------------------------------------------------------

    @property
    def verbosePref(self):
        """Return setting of owner's verbosePref at level specified in its PreferenceEntry.level
        :param level:
        :return:
        """
        # If the level of the object is below the Preference level,
        #    recursively calls base (super) classes to get preference at specified level
        return self.get_pref_setting_for_level(kpVerbosePref,
                                               self._verbose_pref.level)[0]

    @verbosePref.setter
    def verbosePref(self, setting):
        """Assign setting to owner's verbosePref
        :param setting:
        :return:
        """
        self.set_preference(candidate_info=setting,
                            pref_ivar_name=kpVerbosePref)

    # region param_validation ----------------------------------------------------------------------------------------------

    @property
    def paramValidationPref(self):
        """Return setting of owner's param_validationPref at level specified in its PreferenceEntry.level
        :param level:
        :return:
        """
        # If the level of the object is below the Preference level,
        #    recursively call base (super) classes to get preference at specified level
        return self.get_pref_setting_for_level(
            kpParamValidationPref, self._param_validation_pref.level)[0]

    @paramValidationPref.setter
    def paramValidationPref(self, setting):
        """Assign setting to owner's param_validationPref
        :param setting:
        :return:
        """
        self.set_preference(setting, kpParamValidationPref)

    #region reportOutput entry -----------------------------------------------------------------------------------------

    @property
    def reportOutputPref(self):
        """Return setting of owner's reportOutputPref at level specified in its PreferenceEntry.level
        :param level:
        :return:
        """
        # If the level of the object is below the Preference level,
        #    recursively calls super (closer to base) classes to get preference at specified level
        return self.get_pref_setting_for_level(
            kpReportOutputPref, self._report_output_pref.level)[0]

    @reportOutputPref.setter
    def reportOutputPref(self, setting):
        """Assign setting to owner's reportOutputPref
        :param setting:
        :return:
        """
        self.set_preference(candidate_info=setting,
                            pref_ivar_name=kpReportOutputPref)

    #region log entry --------------------------------------------------------------------------------------------------

    @property
    def logPref(self):
        """Return setting of owner's logPref at level specified in its PreferenceEntry.level
        :param level:
        :return:
        """
        # If the level of the object is below the Preference level,
        #    recursively calls base (super) classes to get preference at specified level
        return self.get_pref_setting_for_level(kpLogPref,
                                               self._log_pref.level)[0]

    # # VERSION THAT USES OWNER'S logPref TO LIST ENTRIES TO BE RECORDED
    # @logPref.setter
    # def logPref(self, setting):
    #     """Assign setting to owner's logPref and, if it has log entries, add them to owner's log
    #     :param setting:
    #     :return:
    #     """
    #
    #     entries, level = self.set_preference(candidate_info=setting, pref_ivar_name=kpLogPref, [str, list])
    #
    #     if entries:
    #         # Add entries to owner's log
    #         from Globals.Log import Log
    #
    #         try:
    #             self.owner.log.add_entries(entries=entries)
    #         except AttributeError:
    #             self.owner.log = Log(owner=self, entries=entries)

    # VERSION THAT USES OWNER'S logPref AS RECORDING SWITCH
    @logPref.setter
    def logPref(self, setting):
        """Assign setting to owner's logPref
        :param setting:
        :return:
        """
        self.set_preference(candidate_info=setting, pref_ivar_name=kpLogPref)

    #region runtimeParamModulation -------------------------------------------------------------------------------------

    @property
    def runtimeParamModulationPref(self):
        """Returns owner's runtimeParamModulationPref
        :return:
        """
        # return self._runtime_param_modulation_pref
        return self.get_pref_setting_for_level(
            kpRuntimeParamModulationPref,
            self._runtime_param_modulation_pref.level)[0]

    @runtimeParamModulationPref.setter
    def runtimeParamModulationPref(self, setting):
        """Assign runtimeParamModulationPref
        :param entry:
        :return:
        """
        self.set_preference(candidate_info=setting,
                            pref_ivar_name=kpRuntimeParamModulationPref)

    #region runtimeParamStickyAssignment -------------------------------------------------------------------------------

    @property
    def runtimeParamStickyAssignmentPref(self):
        """Returns owner's runtimeParamStickyAssignmentPref
        :return:
        """
        # return self._runtime_param_sticky_assignment_pref
        return self.get_pref_setting_for_level(
            kpRuntimeParamStickyAssignmentPref,
            self._runtime_param_sticky_assignment_pref.level)[0]

    @runtimeParamStickyAssignmentPref.setter
    def runtimeParamStickyAssignmentPref(self, setting):
        """Assign runtimeParamStickyAssignmentPref
        :param entry:
        :return:
        """
        self.set_preference(candidate_info=setting,
                            pref_ivar_name=kpRuntimeParamStickyAssignmentPref)
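
Every preference in the class above follows the same shape: the value is stored as a PreferenceEntry pairing a setting with the PreferenceLevel at which it should be resolved, and the property getter delegates to `get_pref_setting_for_level` using the level recorded in the entry. A self-contained, simplified sketch of that shape (plain namedtuple and enum stand-ins for the PsyNeuLink classes; the resolution logic here is deliberately trivial and only illustrative):

from collections import namedtuple
from enum import IntEnum

PrefEntry = namedtuple('PrefEntry', 'setting level')    # stand-in for PreferenceEntry

class Level(IntEnum):                                   # stand-in for PreferenceLevel
    INSTANCE = 0
    TYPE = 1
    CATEGORY = 2
    SYSTEM = 3

class TinyPrefSet:
    def __init__(self, verbose=PrefEntry(False, Level.INSTANCE)):
        self._verbose_pref = verbose

    def get_pref_setting_for_level(self, entry, level):
        # Illustrative resolution: just return the stored setting for the requested level
        return entry.setting, level

    @property
    def verbosePref(self):
        # Same shape as the real property: resolve at the level recorded in the entry
        return self.get_pref_setting_for_level(self._verbose_pref,
                                               self._verbose_pref.level)[0]

    @verbosePref.setter
    def verbosePref(self, setting):
        self._verbose_pref = PrefEntry(setting, self._verbose_pref.level)

prefs = TinyPrefSet()
prefs.verbosePref = True
print(prefs.verbosePref)   # True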
Example 7
kwSubtypeDefaultPreferences = 'SubtypeDefaultPreferences'
kwInstanceDefaultPreferences = 'InstanceDefaultPreferences'

# Level default preferences dicts:

ComponentPreferenceSetPrefs = {
    kpVerbosePref,
    kpParamValidationPref,
    kpReportOutputPref,
    kpLogPref,
    kpRuntimeParamModulationPref
}

SystemDefaultPreferencesDict = {
    kwPreferenceSetName: kwSystemDefaultPreferences,
    kpVerbosePref: PreferenceEntry(False, PreferenceLevel.SYSTEM),
    kpParamValidationPref: PreferenceEntry(True, PreferenceLevel.SYSTEM),
    kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.SYSTEM),
    kpLogPref: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY),
    kpRuntimeParamModulationPref: PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.SYSTEM)}

CategoryDefaultPreferencesDict = {
    kwPreferenceSetName: kwCategoryDefaultPreferences,
    kpVerbosePref: PreferenceEntry(False, PreferenceLevel.CATEGORY),
    kpParamValidationPref: PreferenceEntry(True, PreferenceLevel.CATEGORY),
    kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.CATEGORY),
    kpLogPref: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY),
    kpRuntimeParamModulationPref: PreferenceEntry(Modulation.MULTIPLY,PreferenceLevel.CATEGORY)}

TypeDefaultPreferencesDict = {
    kwPreferenceSetName: kwTypeDefaultPreferences,
Example 8
class ObjectiveMechanism(ProcessingMechanism_Base):
    # monitored_output_states is an alias to input_states argument, which can
    # still be used in a spec dict
    """
    ObjectiveMechanism(               \
        monitored_output_states,      \
        default_variable,             \
        size,                         \
        function=LinearCombination,   \
        output_states=OUTCOME,        \
        params=None,                  \
        name=None,                    \
        prefs=None)

    Subclass of `ProcessingMechanism <ProcessingMechanism>` that evaluates the value(s)
    of one or more `OutputStates <OutputState>`.

    COMMENT:
        Description:
            ObjectiveMechanism is a subtype of the ProcessingMechanism Type of the Mechanism Category of the
                Component class
            Its function uses the LinearCombination Function to compare two input variables
            COMPARISON_OPERATION (functionParams) determines whether the comparison is subtractive or divisive
            The function returns an array with the Hadamard (element-wise) difference/quotient of target vs. sample,
                as well as the mean, sum, sum of squares, and mean sum of squares of the comparison array

        Class attributes:
            + componentType (str): ObjectiveMechanism
            + classPreference (PreferenceSet): ObjectiveMechanism_PreferenceSet, instantiated in __init__()
            + classPreferenceLevel (PreferenceLevel): PreferenceLevel.SUBTYPE
            + ClassDefaults.variable (value):  None (must be specified using **input_states** and/or
                                               **monitored_output_states**)
            + paramClassDefaults (dict): {FUNCTION_PARAMS:{COMPARISON_OPERATION: SUBTRACTION}}

        Class methods:
            None

        MechanismRegistry:
            All instances of ObjectiveMechanism are registered in MechanismRegistry, which maintains an
              entry for the subclass, a count for all instances of it, and a dictionary of those instances
    COMMENT

    Arguments
    ---------

    monitored_output_states : List[`OutputState`, `Mechanism`, str, value, dict, `MonitoredOutputStatesOption`] or dict
        specifies the OutputStates, the `values <OutputState.value>` of which will be monitored, and evaluated by
        the ObjectiveMechanism's `function <ObjectiveMechanism>` (see `ObjectiveMechanism_Monitored_Output_States`
        for details of specification).

    default_variable : number, list or np.ndarray : default monitored_output_states
        specifies the format of the `variable <ObjectiveMechanism.variable>` for the `InputStates` of the
        ObjectiveMechanism (see `Mechanism_InputState_Specification` for details).

    size : int, list or np.ndarray of ints
        specifies default_variable as array(s) of zeros if **default_variable** is not passed as an argument;
        if **default_variable** is specified, it takes precedence over the specification of **size**.
        As an example, the following mechanisms are equivalent::
            T1 = TransferMechanism(size = [3, 2])
            T2 = TransferMechanism(default_variable = [[0, 0, 0], [0, 0]])

    COMMENT:
    input_states :  List[InputState, value, str or dict] or Dict[] : default None
        specifies the names and/or formats to use for the values of the InputStates that receive the input from the
        OutputStates specified in the monitored_output_states** argument; if specified, there must be one for each item
        specified in the **monitored_output_states** argument.
    COMMENT

    function: CombinationFunction, ObjectiveFunction, function or method : default LinearCombination
        specifies the function used to evaluate the values listed in :keyword:`monitored_output_states`
        (see `function <ObjectiveMechanism.function>` for details).

    output_states :  List[OutputState, value, str or dict] or Dict[] : default [OUTCOME]
        specifies the OutputStates for the Mechanism;

    role: Optional[LEARNING, CONTROL]
        specifies if the ObjectiveMechanism is being used for learning or control (see `role` for details).

    params : Dict[param keyword, param value] : default None
        a `parameter dictionary <ParameterState_Specification>` that can be used to specify the parameters for the
        Mechanism, its `function <Mechanism_Base.function>`, and/or a custom function and its parameters. Values
        specified for parameters in the dictionary override any assigned to those parameters in arguments of the
        constructor.

    name : str : default see `name <ObjectiveMechanism.name>`
        specifies the name of the ObjectiveMechanism.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the ObjectiveMechanism; see `prefs <ObjectiveMechanism.prefs>` for details.


    Attributes
    ----------

    variable : 2d ndarray : default array of values of OutputStates in monitored_output_states
        the input to the Mechanism's `function <ObjectiveMechanism.function>`.

    monitored_output_states : ContentAddressableList[OutputState]
        determines the OutputStates, the `values <OutputState.value>` of which are monitored, and evaluated by the
        ObjectiveMechanism's `function <ObjectiveMechanism.function>`.  Each item in the list refers to an
        `OutputState` containing the value to be monitored, with a `MappingProjection` from it to the
        corresponding `InputState` listed in the `input_states <ObjectiveMechanism.input_states>` attribute.

    monitored_output_states_weights_and_exponents : List[Tuple(float, float)]
        each tuple in the list contains a weight and exponent associated with a corresponding InputState listed in the
        ObjectiveMechanism's `input_states <ObjectiveMechanism.input_states>` attribute;  these are used by its
        `function <ObjectiveMechanism.function>` to parametrize the contribution that the value of each of the
        OutputStates monitored by the ObjectiveMechanism makes to its output (see `ObjectiveMechanism_Function`).

    input_states : ContentAddressableList[InputState]
        contains the InputStates of the ObjectiveMechanism, each of which receives a `MappingProjection` from the
        OutputStates specified in its `monitored_output_states <ObjectiveMechanism.monitored_output_states>` attribute.

    function : CombinationFunction, ObjectiveFunction, function, or method
        the function used to evaluate the values monitored by the ObjectiveMechanism.  The function can be
        any PsyNeuLink `CombinationFunction` or a Python function that takes a 2d array with an arbitrary number of
        items or a number equal to the number of items in the ObjectiveMechanism's variable (and its number of
        input_states), and returns a 1d array.

    role : None, LEARNING or CONTROL
        specifies whether the ObjectiveMechanism is used for learning in a Process or System (in conjunction with a
        `LearningMechanism`), or for control in a System (in conjunction with a `ControlMechanism <ControlMechanism>`).

    value : 1d np.array
        the output of the evaluation carried out by the ObjectiveMechanism's `function <ObjectiveMechanism.function>`.

    output_state : OutputState
        contains the `primary OutputState <OutputState_Primary>` of the ObjectiveMechanism; the default is
        its *OUTCOME* `OutputState <ObjectiveMechanism_Output>`, the value of which is equal to the
        `value <ObjectiveMechanism.value>` attribute of the ObjectiveMechanism.

    output_states : ContentAddressableList[OutputState]
        by default, contains only the *OUTCOME* (`primary <OutputState_Primary>`) OutputState of the ObjectiveMechanism.

    output_values : 2d np.array
        contains one item that is the value of the *OUTCOME* `OutputState <ObjectiveMechanism_Output>`.

    name : str
        the name of the ObjectiveMechanism; if it is not specified in the **name** argument of the constructor, a
        default is assigned by MechanismRegistry (see `Naming` for conventions used for default and duplicate names).

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the ObjectiveMechanism; if it is not specified in the **prefs** argument of the 
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet 
        <LINK>` for details).


    """

    componentType = OBJECTIVE_MECHANISM

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName: 'ObjectiveCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    # ClassDefaults.variable = None;  Must be specified using either **input_states** or **monitored_output_states**
    class ClassDefaults(ProcessingMechanism_Base.ClassDefaults):
        variable = [0]

    # ObjectiveMechanism parameter and control signal assignments):
    paramClassDefaults = Mechanism_Base.paramClassDefaults.copy()
    paramClassDefaults.update({
        TIME_SCALE: TimeScale.TRIAL,
        FUNCTION: LinearCombination,
    })

    standard_output_states = standard_output_states.copy()

    # FIX:  TYPECHECK MONITOR TO LIST OR ZIP OBJECT
    @tc.typecheck
    def __init__(self,
                 monitored_output_states=None,
                 default_variable=None,
                 size=None,
                 function=LinearCombination,
                 output_states: tc.optional(tc.any(str, Iterable)) = OUTCOME,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=None,
                 **kwargs):

        input_states = monitored_output_states
        if output_states is None or output_states is OUTCOME:
            output_states = [OUTCOME]

        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(input_states=input_states,
                                                  output_states=output_states,
                                                  function=function,
                                                  params=params)

        self._learning_role = None

        from psyneulink.components.states.outputstate import StandardOutputStates
        if not isinstance(self.standard_output_states, StandardOutputStates):
            self.standard_output_states = StandardOutputStates(
                self, self.standard_output_states, indices=PRIMARY)

        super().__init__(variable=default_variable,
                         size=size,
                         input_states=input_states,
                         output_states=output_states,
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=self)

        # This is used to specify whether the ObjectiveMechanism is associated with a ControlMechanism that is
        #    the controller for a System;  it is set by the ControlMechanism when it creates the ObjectiveMechanism
        self.controller = False

    def _validate_variable(self, variable, context=None):
        """Validate that default_variable (if specified) matches in number of values the monitored_output_states

        """
        # # MODIFIED 10/8/17 OLD: [OBVIATED BY ALIASING OF monitored_output_states TO input_states]
        # # NOTE 6/29/17: (CW)
        # # This is a very questionable check. The problem is that TransferMechanism (if default_variable is passed as
        # # None) expects variable to be initialized to ClassDefaults.variable ([[0]]) while ObjectiveMechanism expects
        # # variable to be initialized to ClassDefaults.variable ([[0]]) AFTER this check has occurred. The problem is,
        # # my solution to this has been to write (in each subclass of ProcessingMechanism) specific behavior on how to
        # # react if both variable and size are None. This is fine but potentially cumbersome for future developers.
        # # We should consider deleting this check entirely, and allowing ProcessingMechanism (or a further parent class)
        # # to always set variable to ClassDefaults.variable if variable and size are both None.
        # # IMPLEMENTATION NOTE:  use self.user_params (i.e., values specified in constructor)
        # #                       since params have not yet been validated and so self.params is not yet available
        # if variable is not None and len(variable) != len(self.user_params[MONITORED_OUTPUT_STATES]):
        #     raise ObjectiveMechanismError("The number of items specified for the default_variable arg ({}) of {} "
        #                                   "must match the number of items specified for its monitored_output_states arg ({})".
        #                                   format(len(variable), self.name, len(self.user_params[MONITORED_OUTPUT_STATES])))
        # MODIFIED 10/8/17 END

        return super()._validate_variable(variable=variable, context=context)

    def _validate_params(self, request_set, target_set=None, context=None):
        """Validate **role**, **monitored_output_states**, amd **input_states** arguments

        """

        super()._validate_params(request_set=request_set,
                                 target_set=target_set,
                                 context=context)

        if (ROLE in target_set and target_set[ROLE]
                and target_set[ROLE] not in {LEARNING, CONTROL}):
            raise ObjectiveMechanismError(
                "\'role\' arg ({}) of {} must be either \'LEARNING\' or \'CONTROL\'"
                .format(target_set[ROLE], self.name))

        if (INPUT_STATES in target_set and target_set[INPUT_STATES] is not None
                and not all(input_state is None
                            for input_state in target_set[INPUT_STATES])):
            # FIX: 10/3/17 - ??ARE THESE DOING ANYTHING:  INTEGRATE THEM... HERE OR BELOW (IN _instantiate_input_states)
            if MONITORED_OUTPUT_STATES in target_set:
                monitored_output_states = target_set[MONITORED_OUTPUT_STATES]
            elif hasattr(self, 'monitored_output_states'):
                monitored_output_states = self.monitored_output_states
            else:
                pass

        # FIX: 10/3/17 ->
        if MONITORED_OUTPUT_STATES in target_set and target_set[
                MONITORED_OUTPUT_STATES] is not None:
            pass

    def _instantiate_input_states(self,
                                  monitored_output_states_specs=None,
                                  context=None):
        """Instantiate InputStates for each OutputState specified in monitored_output_states_specs

        Called by _add_monitored_output_states as well as during initialization
            (so must distinguish between initialization and adding to instantiated input_states)

        Parse specifications for **input_states**, using **monitored_output_states** where relevant and instantiate
        input_states.

        Instantiate or extend self.instance_defaults.variable to match number of InputStates.

        Update self.input_state and self.input_states.

        Call _instantiate_monitoring_projection() to instantiate MappingProjection to InputState
            if an OutputState has been specified.
        """
        from psyneulink.components.states.inputstate import InputState
        # If call is for initialization
        if self.init_status is InitStatus.UNSET:
            # Pass self.input_states (containing specs from **input_states** arg of constructor)
            input_states = self.input_states
        else:
            # If initialized, don't pass self.input_states, as this is now a list of existing InputStates
            input_states = None

        # PARSE input_states (=monitored_output_states) specifications into InputState specification dictionaries
        # and ASSIGN self.instance_defaults.variable

        if not input_states:
            # If no input_states are specified, create a default
            input_states = [{STATE_TYPE: InputState, VARIABLE: [0]}]

        # Instantiate InputStates corresponding to OutputStates specified in monitored_output_states
        # instantiated_input_states = super()._instantiate_input_states(input_states=self.input_states, context=context)
        instantiated_input_states = super()._instantiate_input_states(
            input_states=input_states, context=context)
        # MODIFIED 10/3/17 END

    def add_monitored_output_states(self,
                                    monitored_output_states_specs,
                                    context=None):
        """Instantiate `OutputStates <OutputState>` to be monitored by the ObjectiveMechanism.

        Used by other Components to add a `State` or list of States to be monitored by the ObjectiveMechanism.
        The **monitored_output_states_specs** argument can be a `Mechanism`, `OutputState`, `tuple specification
        <InputState_Tuple_Specification>`, `State specification dictionary <InputState_Specification_Dictionary>`, or a
        list with any of these.  If an item is a Mechanism, its `primary OutputState <OutputState_Primary>` is used.
        """
        monitored_output_states_specs = list(monitored_output_states_specs)

        # FIX: NEEDS TO RETURN output_states (?IN ADDITION TO input_states) SO THAT IF CALLED BY ControlMechanism THAT
        # FIX:  BELONGS TO A SYSTEM, THE ControlMechanism CAN CALL System._validate_monitored_state_in_system
        # FIX:  ON THE output_states ADDED
        return self._instantiate_input_states(
            monitored_output_states_specs=monitored_output_states_specs,
            context=context)

    def _instantiate_attributes_after_function(self, context=None):
        """Assign InputState weights and exponents to ObjectiveMechanism's function
        """
        super()._instantiate_attributes_after_function(context=context)
        self._instantiate_function_weights_and_exponents(context=context)

    def _instantiate_function_weights_and_exponents(self, context=None):
        """Assign weights and exponents to ObjectiveMechanism's function if it has those attributes

        For each, only make assignment if one or more entries in it has been assigned a value
        If any one value has been assigned, assign default value (1) to all other elements
        """
        DEFAULT_WEIGHT = 1
        DEFAULT_EXPONENT = 1

        weights = [input_state.weight for input_state in self.input_states]
        exponents = [input_state.exponent for input_state in self.input_states]

        if hasattr(self.function_object, WEIGHTS):
            if any(weight is not None for weight in weights):
                self.function_object.weights = [
                    weight if weight is not None else DEFAULT_WEIGHT
                    for weight in weights
                ]
        if hasattr(self.function_object, EXPONENTS):
            if any(exponent is not None for exponent in exponents):
                self.function_object.exponents = [
                    exponent if exponent is not None else DEFAULT_EXPONENT
                    for exponent in exponents
                ]
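
        # Illustrative note (not part of the original source): e.g., with input_state weights
        # [None, 2, None] and exponents [None, None, 3], the assignments above yield
        # function_object.weights == [1, 2, 1] and function_object.exponents == [1, 1, 3].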

    @property
    def monitored_output_states(self):
        if not isinstance(self.input_states, ContentAddressableList):
            return None
        else:
            return ContentAddressableList(
                component_type=OutputState,
                list=[
                    projection.sender for input_state in self.input_states
                    for projection in input_state.path_afferents
                ])

    @property
    def monitored_output_states_weights_and_exponents(self):
        if hasattr(self.function_object,
                   WEIGHTS) and self.function_object.weights is not None:
            weights = self.function_object.weights
        else:
            weights = [input_state.weight for input_state in self.input_states]
        if hasattr(self.function_object,
                   EXPONENTS) and self.function_object.exponents is not None:
            exponents = self.function_object.exponents
        else:
            exponents = [
                input_state.exponent for input_state in self.input_states
            ]
        return [(w, e) for w, e in zip(weights, exponents)]

    @monitored_output_states_weights_and_exponents.setter
    def monitored_output_states_weights_and_exponents(
            self, weights_and_exponents_tuples):

        weights = [w[0] for w in weights_and_exponents_tuples]
        exponents = [e[1] for e in weights_and_exponents_tuples]
        self._instantiate_weights_and_exponents(weights, exponents)
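
# Illustrative sketch (not part of the original source): reading back the monitored OutputStates and
# their (weight, exponent) pairs from an instance of the mechanism defined above (the instance name
# `obj_mech` is hypothetical):
#
#     for output_state, (w, e) in zip(obj_mech.monitored_output_states,
#                                     obj_mech.monitored_output_states_weights_and_exponents):
#         print(output_state.name, w, e)
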
Esempio n. 9
class IntegratorMechanism(ProcessingMechanism_Base):
    """
    IntegratorMechanism(                        \
    default_variable=None,                      \
    size=None,                                  \
    function=AdaptiveIntegrator(rate=0.5),      \
    params=None,                                \
    name=None,                                  \
    prefs=None)

    Subclass of `ProcessingMechanism <ProcessingMechanism>` that integrates its input.

    COMMENT:
        Description:
            - DOCUMENT:

        Class attributes:
            + componentType (str): IntegratorMechanism
            + classPreferences (PreferenceSet): 'IntegratorMechanismCustomClassPreferences', instantiated in __init__()
            + classPreferenceLevel (PreferenceLevel): PreferenceLevel.TYPE
            + ClassDefaults.variable (value):  [[0]]
            + paramClassDefaults (dict): {OUTPUT_STATES: [PREDICTION_MECHANISM_OUTPUT]}
        Class methods:
            None

        MechanismRegistry:
           All instances of IntegratorMechanism are registered in MechanismRegistry, which maintains an entry for the subclass,
              a count for all instances of it, and a dictionary of those instances

    COMMENT

    Arguments
    ---------

    default_variable : number, list or np.ndarray
        the input to the Mechanism to use if none is provided in a call to its
        `execute <Mechanism_Base.execute>` or `run <Mechanism_Base.run>` methods;
        also serves as a template to specify the length of `variable <IntegratorMechanism.variable>` for
        `function <IntegratorMechanism.function>`, and the `primary outputState <OutputState_Primary>` of the
        Mechanism.

    size : int, list or np.ndarray of ints
        specifies default_variable as array(s) of zeros if **default_variable** is not passed as an argument;
        if **default_variable** is specified, it takes precedence over the specification of **size**.
        As an example, the following mechanisms are equivalent::
            T1 = TransferMechanism(size = [3, 2])
            T2 = TransferMechanism(default_variable = [[0, 0, 0], [0, 0]])

    function : IntegratorFunction : default AdaptiveIntegrator(rate=0.5)
        specifies the function used to integrate the input.  Must take a single numeric value, or a list or np.array
        of values, and return one of the same form.

    params : Dict[param keyword, param value] : default None
        a `parameter dictionary <ParameterState_Specification>` that can be used to specify the parameters for
        the Mechanism, parameters for its `function <IntegratorMechanism.function>`, and/or a custom function and its
        parameters.  Values specified for parameters in the dictionary override any assigned to those parameters in
        arguments of the constructor.

    name : str : default see `name <IntegratorMechanism.name>`
        specifies the name of the IntegratorMechanism.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the IntegratorMechanism; see `prefs <IntegratorMechanism.prefs>` for details.

    Attributes
    ----------
    variable : value: default
        the input to Mechanism's ``function``.

    name : str
        the name of the IntegratorMechanism; if it is not specified in the **name** argument of the constructor, a
        default is assigned by MechanismRegistry (see `Naming` for conventions used for default and duplicate names).

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the IntegratorMechanism; if it is not specified in the **prefs** argument of the 
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet 
        <LINK>` for details).

    """

    componentType = INTEGRATOR_MECHANISM

    classPreferenceLevel = PreferenceLevel.TYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName: 'IntegratorMechanismCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(True, PreferenceLevel.INSTANCE)
    }

    class ClassDefaults(ProcessingMechanism_Base.ClassDefaults):
        # Sets template for variable (input)
        variable = [[0]]

    paramClassDefaults = ProcessingMechanism_Base.paramClassDefaults.copy()
    paramClassDefaults.update({
        # TIME_SCALE: TimeScale.TRIAL,
        OUTPUT_STATES: [PREDICTION_MECHANISM_OUTPUT]
    })

    from psyneulink.components.functions.function import AdaptiveIntegrator

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 size=None,
                 function=AdaptiveIntegrator(rate=0.5),
                 time_scale=TimeScale.TRIAL,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=None):
        """Assign type-level preferences, default input value (SigmoidLayer_DEFAULT_BIAS) and call super.__init__
        """

        if default_variable is None and size is None:
            default_variable = self.ClassDefaults.variable

        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        # self.ClassDefaults.variable = default_variable or [[0]]
        params = self._assign_args_to_param_dicts(function=function,
                                                  params=params)

        # if default_variable is NotImplemented:
        #     default_variable = SigmoidLayer_DEFAULT_NET_INPUT

        # self.size = size

        super(IntegratorMechanism, self).__init__(variable=default_variable,
                                                  size=size,
                                                  params=params,
                                                  name=name,
                                                  prefs=prefs,
                                                  context=self)

        # IMPLEMENT: INITIALIZE LOG ENTRIES, NOW THAT ALL PARTS OF THE MECHANISM HAVE BEEN INSTANTIATED

    # MODIFIED 6/2/17 NEW:
    @property
    def previous_value(self):
        return self.function_object.previous_value
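

# Illustrative sketch (not part of the original source): the AdaptiveIntegrator used as the default
# function above time-averages its input, and the `previous_value` property defined above exposes the
# integrator's running value.  Approximate results shown assume rate=0.5, an initial value of 0, and
# no noise:
#
#     my_integrator = IntegratorMechanism(function=AdaptiveIntegrator(rate=0.5))
#     my_integrator.execute([1.0])       # -> ~[[0.5]]
#     my_integrator.execute([1.0])       # -> ~[[0.75]]
#     my_integrator.previous_value       # -> ~[[0.75]]

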
class CompositionInterfaceMechanism(ProcessingMechanism_Base):
    """
    CompositionInterfaceMechanism(                    \
    default_input_value=None,                         \
    size=None,                                        \
    function=Linear(slope = 1.0, intercept = 0.0),    \
    params=None,                                      \
    name=None,                                        \
    prefs=None)

    Implements the CompositionInterfaceMechanism subclass of Mechanism.

    Arguments
    ---------

    default_input_value : number, list or np.ndarray
        the input to the Mechanism to use if none is provided in a call to its
        `execute <Mechanism_Base.execute>` or `run <Mechanism_Base.run>` methods;
        also serves as a template to specify the length of `variable <CompositionInterfaceMechanism.variable>` for
        `function <CompositionInterfaceMechanism.function>`, and the `primary outputState <OutputState_Primary>` of the
        Mechanism.

    size : int, list or np.ndarray of ints
        specifies default_input_value as array(s) of zeros if **default_input_value** is not passed as an argument;
        if **default_input_value** is specified, it takes precedence over the specification of **size**.
        As an example, the following mechanisms are equivalent::
            T1 = TransferMechanism(size = [3, 2])
            T2 = TransferMechanism(default_variable = [[0, 0, 0], [0, 0]])

    function : Function : default Linear
        specifies the function used to transform the input;  the default shown in the constructor signature is an
        identity `Linear` function (slope = 1.0, intercept = 0.0), which passes the input through unchanged.

    params : Dict[param keyword: param value] : default None
        a `parameter dictionary <ParameterState_Specifying_Parameters>` that can be used to specify the parameters for
        the `Mechanism <Mechanism>`, parameters for its `function <CompositionInterfaceMechanism.function>`, and/or a
        custom function and its parameters.  Values specified for parameters in the dictionary override any assigned
        to those parameters in arguments of the constructor.

    name : str : default see `name <CompositionInterfaceMechanism.name>`
        specifies the name of the CompositionInterfaceMechanism.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the CompositionInterfaceMechanism; see `prefs <CompositionInterfaceMechanism.prefs>` for details.

    Attributes
    ----------
    variable : value: default
        the input to Mechanism's ``function``.

    name : str
        the name of the CompositionInterfaceMechanism; if it is not specified in the **name** argument of the
        constructor, a default is assigned by MechanismRegistry (see `Naming` for conventions used for default and
        duplicate names).

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the CompositionInterfaceMechanism; if it is not specified in the **prefs** argument of
        the constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet
        <LINK>` for details).

    """

    componentType = COMPOSITION_INTERFACE_MECHANISM

    classPreferenceLevel = PreferenceLevel.TYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName:
        'CompositionInterfaceMechanismCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(True, PreferenceLevel.INSTANCE)
    }

    paramClassDefaults = Mechanism_Base.paramClassDefaults.copy()
    paramClassDefaults.update({})
    paramNames = paramClassDefaults.keys()

    @tc.typecheck
    def __init__(self,
                 default_input_value=None,
                 size=None,
                 function=None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):

        if default_input_value is None and size is None:
            default_input_value = self.ClassDefaults.variable

        params = self._assign_args_to_param_dicts(function=function,
                                                  params=params)

        super(CompositionInterfaceMechanism,
              self).__init__(variable=default_input_value,
                             size=size,
                             params=params,
                             function=function,
                             name=name,
                             prefs=prefs,
                             context=ContextFlags.CONSTRUCTOR)
#
# ******************************************** SystemPreferenceSet **************************************************

from psyneulink.globals.preferences.componentpreferenceset import ComponentPreferenceSet, ComponentPreferenceSetPrefs
from psyneulink.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel

__all__ = [
    'SystemPreferenceSet', 'recordSimulationPrefCategoryDefault',
    'recordSimulationPrefInstanceDefault', 'recordSimulationPrefTypeDefault',
    'RECORD_SIMULATION_PREF', 'kpRecordSimulationPref'
]

RECORD_SIMULATION_PREF = kpRecordSimulationPref = '_record_simulation_pref'

# Defaults for recordSimulationPref:
recordSimulationPrefInstanceDefault = PreferenceEntry(False,
                                                      PreferenceLevel.INSTANCE)
recordSimulationPrefTypeDefault = PreferenceEntry(False,
                                                  PreferenceLevel.INSTANCE)
recordSimulationPrefCategoryDefault = PreferenceEntry(False,
                                                      PreferenceLevel.INSTANCE)

SystemPreferenceSetPrefs = ComponentPreferenceSetPrefs.copy()
SystemPreferenceSetPrefs.add(kpRecordSimulationPref)


def is_sys_pref(pref):
    return pref in SystemPreferenceSetPrefs
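

# Illustrative sketch (not part of the original source): kpRecordSimulationPref was added to
# SystemPreferenceSetPrefs above, so it is recognized as a System pref; an arbitrary (hypothetical)
# keypath is not:
#
#     is_sys_pref(kpRecordSimulationPref)   # -> True
#     is_sys_pref('_no_such_pref')          # -> False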


def is_sys_pref_set(pref):
    if pref is None:
        return True
    # NOTE: assumed completion -- the original snippet is truncated at this point
    return isinstance(pref, (ComponentPreferenceSet, dict))
Esempio n. 12
def test_EVC_gratton():
    def test_search_function(controller=None, **kwargs):
        result = np.array(controller.allocationPolicy).reshape(
            len(controller.allocationPolicy), -1)
        return result

    def test_outcome_function(**kwargs):
        result = np.array([0])
        return result

    # Preferences:
    mechanism_prefs = ComponentPreferenceSet(
        prefs={
            kpVerbosePref: PreferenceEntry(False, PreferenceLevel.INSTANCE),
            kpReportOutputPref: PreferenceEntry(False,
                                                PreferenceLevel.INSTANCE)
        })

    process_prefs = ComponentPreferenceSet(
        reportOutput_pref=PreferenceEntry(False, PreferenceLevel.INSTANCE),
        verbose_pref=PreferenceEntry(True, PreferenceLevel.INSTANCE))

    # Control Parameters
    signalSearchRange = np.arange(1.0, 2.0, 0.2)

    # Stimulus Mechanisms
    Target_Stim = TransferMechanism(name='Target Stimulus',
                                    function=Linear(slope=0.3324))
    Flanker_Stim = TransferMechanism(name='Flanker Stimulus',
                                     function=Linear(slope=0.3545221843))

    # Processing Mechanisms (Control)
    Target_Rep = TransferMechanism(
        name='Target Representation',
        function=Linear(
            slope=(1.0,
                   ControlProjection(function=Linear,
                                     control_signal_params={
                                         ALLOCATION_SAMPLES: signalSearchRange
                                     }))),
        prefs=mechanism_prefs)
    Flanker_Rep = TransferMechanism(
        name='Flanker Representation',
        function=Linear(
            slope=(1.0,
                   ControlProjection(function=Linear,
                                     control_signal_params={
                                         ALLOCATION_SAMPLES: signalSearchRange
                                     }))),
        prefs=mechanism_prefs)

    # Processing Mechanism (Automatic)
    Automatic_Component = TransferMechanism(name='Automatic Component',
                                            function=Linear(slope=(1.0)),
                                            prefs=mechanism_prefs)

    # Decision Mechanisms
    Decision = DDM(
        function=BogaczEtAl(drift_rate=(1.0),
                            threshold=(0.2645),
                            noise=(0.5),
                            starting_point=(0),
                            t0=0.15),
        prefs=mechanism_prefs,
        name='Decision',
        output_states=[
            DECISION_VARIABLE, RESPONSE_TIME, PROBABILITY_UPPER_THRESHOLD
        ],
    )

    # Outcome Mechanisms:
    Reward = TransferMechanism(name='Reward')

    # Processes:
    TargetControlProcess = Process(default_variable=[0],
                                   pathway=[Target_Stim, Target_Rep, Decision],
                                   prefs=process_prefs,
                                   name='Target Control Process')

    FlankerControlProcess = Process(
        default_variable=[0],
        pathway=[Flanker_Stim, Flanker_Rep, Decision],
        prefs=process_prefs,
        name='Flanker Control Process')

    TargetAutomaticProcess = Process(
        default_variable=[0],
        pathway=[Target_Stim, Automatic_Component, Decision],
        prefs=process_prefs,
        name='Target Automatic Process')

    FlankerAutomaticProcess = Process(
        default_variable=[0],
        pathway=[Flanker_Stim, Automatic_Component, Decision],
        prefs=process_prefs,
        name='Flanker1 Automatic Process')

    RewardProcess = Process(default_variable=[0],
                            pathway=[Reward],
                            prefs=process_prefs,
                            name='RewardProcess')

    # System:
    mySystem = System(
        processes=[
            TargetControlProcess, FlankerControlProcess,
            TargetAutomaticProcess, FlankerAutomaticProcess, RewardProcess
        ],
        controller=EVCControlMechanism,
        enable_controller=True,
        monitor_for_control=[
            Reward, (Decision.PROBABILITY_UPPER_THRESHOLD, 1, -1)
        ],
        # monitor_for_control=[Reward, DDM_PROBABILITY_UPPER_THRESHOLD, (DDM_RESPONSE_TIME, -1, 1)],
        name='EVC Gratton System')

    # Show characteristics of system:
    mySystem.show()
    mySystem.controller.show()

    # mySystem.show_graph(show_control=True)

    # configure EVC components
    mySystem.controller.control_signals[
        0].intensity_cost_function = Exponential(rate=0.8046).function
    mySystem.controller.control_signals[
        1].intensity_cost_function = Exponential(rate=0.8046).function

    for mech in mySystem.controller.prediction_mechanisms.mechanisms:
        if mech.name == 'Flanker Stimulus Prediction Mechanism' or mech.name == 'Target Stimulus Prediction Mechanism':
            # when you find a key mechanism (transfer mechanism) with the correct name, print its name
            print(mech.name)
            mech.function_object.rate = 1.0

        if 'Reward' in mech.name:
            print(mech.name)
            mech.function_object.rate = 1.0
            # mySystem.controller.prediction_mechanisms[mech].parameterStates['rate'].base_value = 1.0

    print('new rate of integration mechanisms before System execution:')
    # for mech in mySystem.controller.prediction_mechanisms.keys():
    for mech in mySystem.controller.prediction_mechanisms.mechanisms:
        print(mech.name)
        print(mech.function_object.rate)
        print('----')

    # generate stimulus environment

    nTrials = 3
    targetFeatures = [1, 1, 1]
    flankerFeatures = [1, -1,
                       1]  # for full simulation: flankerFeatures = [-1,1]
    reward = [100, 100, 100]

    targetInputList = targetFeatures
    flankerInputList = flankerFeatures
    rewardList = reward

    # targetInputList = np.random.choice(targetFeatures, nTrials).tolist()
    # flankerInputList = np.random.choice(flankerFeatures, nTrials).tolist()
    # rewardList = (np.ones(nTrials) * reward).tolist() #np.random.choice(reward, nTrials).tolist()

    stim_list_dict = {
        Target_Stim: targetInputList,
        Flanker_Stim: flankerInputList,
        Reward: rewardList
    }

    mySystem.controller.reportOutputPref = True

    expected_results_array = [
        0.2645,
        0.32257753,
        0.94819408,
        100.,
        0.2645,
        0.31663196,
        0.95508757,
        100.,
        0.2645,
        0.31093566,
        0.96110142,
        100.,
        0.2645,
        0.30548947,
        0.96633839,
        100.,
        0.2645,
        0.30029103,
        0.97089165,
        100.,
        0.2645,
        0.3169957,
        0.95468427,
        100.,
        0.2645,
        0.31128378,
        0.9607499,
        100.,
        0.2645,
        0.30582202,
        0.96603252,
        100.,
        0.2645,
        0.30060824,
        0.9706259,
        100.,
        0.2645,
        0.29563774,
        0.97461444,
        100.,
        0.2645,
        0.31163288,
        0.96039533,
        100.,
        0.2645,
        0.30615555,
        0.96572397,
        100.,
        0.2645,
        0.30092641,
        0.97035779,
        100.,
        0.2645,
        0.2959409,
        0.97438178,
        100.,
        0.2645,
        0.29119255,
        0.97787196,
        100.,
        0.2645,
        0.30649004,
        0.96541272,
        100.,
        0.2645,
        0.30124552,
        0.97008732,
        100.,
        0.2645,
        0.29624499,
        0.97414704,
        100.,
        0.2645,
        0.29148205,
        0.97766847,
        100.,
        0.2645,
        0.28694892,
        0.98071974,
        100.,
        0.2645,
        0.30156558,
        0.96981445,
        100.,
        0.2645,
        0.29654999,
        0.97391021,
        100.,
        0.2645,
        0.29177245,
        0.97746315,
        100.,
        0.2645,
        0.28722523,
        0.98054192,
        100.,
        0.2645,
        0.28289958,
        0.98320731,
        100.,
        0.2645,
        0.28289958,
        0.98320731,
        100.,
        0.2645,
        0.42963678,
        0.47661181,
        100.,
        0.2645,
        0.42846471,
        0.43938586,
        100.,
        -0.2645,
        0.42628176,
        0.40282965,
        100.,
        0.2645,
        0.42314468,
        0.36732207,
        100.,
        -0.2645,
        0.41913221,
        0.333198,
        100.,
        0.2645,
        0.42978939,
        0.51176048,
        100.,
        0.2645,
        0.42959394,
        0.47427693,
        100.,
        -0.2645,
        0.4283576,
        0.43708106,
        100.,
        0.2645,
        0.4261132,
        0.40057958,
        100.,
        -0.2645,
        0.422919,
        0.36514906,
        100.,
        0.2645,
        0.42902209,
        0.54679323,
        100.,
        0.2645,
        0.42980788,
        0.50942101,
        100.,
        -0.2645,
        0.42954704,
        0.47194318,
        100.,
        -0.2645,
        0.42824656,
        0.43477897,
        100.,
        0.2645,
        0.42594094,
        0.3983337,
        100.,
        -0.2645,
        0.42735293,
        0.58136855,
        100.,
        -0.2645,
        0.42910149,
        0.54447221,
        100.,
        0.2645,
        0.42982229,
        0.50708112,
        100.,
        -0.2645,
        0.42949608,
        0.46961065,
        100.,
        -0.2645,
        0.42813159,
        0.43247968,
        100.,
        -0.2645,
        0.42482049,
        0.61516258,
        100.,
        0.2645,
        0.42749136,
        0.57908829,
        100.,
        0.2645,
        0.42917687,
        0.54214925,
        100.,
        -0.2645,
        0.42983261,
        0.50474093,
        100.,
        -0.2645,
        0.42944107,
        0.46727945,
        100.,
        -0.2645,
        0.42944107,
        0.46727945,
        100.,
        0.2645,
        0.32257753,
        0.94819408,
        100.,
        0.2645,
        0.31663196,
        0.95508757,
        100.,
        0.2645,
        0.31093566,
        0.96110142,
        100.,
        0.2645,
        0.30548947,
        0.96633839,
        100.,
        0.2645,
        0.30029103,
        0.97089165,
        100.,
        0.2645,
        0.3169957,
        0.95468427,
        100.,
        0.2645,
        0.31128378,
        0.9607499,
        100.,
        0.2645,
        0.30582202,
        0.96603252,
        100.,
        0.2645,
        0.30060824,
        0.9706259,
        100.,
        0.2645,
        0.29563774,
        0.97461444,
        100.,
        0.2645,
        0.31163288,
        0.96039533,
        100.,
        0.2645,
        0.30615555,
        0.96572397,
        100.,
        0.2645,
        0.30092641,
        0.97035779,
        100.,
        0.2645,
        0.2959409,
        0.97438178,
        100.,
        0.2645,
        0.29119255,
        0.97787196,
        100.,
        0.2645,
        0.30649004,
        0.96541272,
        100.,
        0.2645,
        0.30124552,
        0.97008732,
        100.,
        0.2645,
        0.29624499,
        0.97414704,
        100.,
        0.2645,
        0.29148205,
        0.97766847,
        100.,
        0.2645,
        0.28694892,
        0.98071974,
        100.,
        0.2645,
        0.30156558,
        0.96981445,
        100.,
        0.2645,
        0.29654999,
        0.97391021,
        100.,
        0.2645,
        0.29177245,
        0.97746315,
        100.,
        0.2645,
        0.28722523,
        0.98054192,
        100.,
        0.2645,
        0.28289958,
        0.98320731,
        100.,
        0.2645,
        0.28289958,
        0.98320731,
        100.,
    ]

    Flanker_Rep.set_log_conditions((SLOPE, CONTROL))

    mySystem.run(
        num_trials=nTrials,
        inputs=stim_list_dict,
    )

    np.testing.assert_allclose(
        pytest.helpers.expand_np_ndarray(mySystem.results),
        expected_results_array,
        atol=1e-08,
        verbose=True,
    )
Esempio n. 13
class TransferMechanism(ProcessingMechanism_Base):
    """
    TransferMechanism(           \
    default_variable=None,       \
    size=None,                   \
    input_states=None,           \
    function=Linear,             \
    initial_value=None,          \
    noise=0.0,                   \
    time_constant=1.0,           \
    integrator_mode=False,       \
    clip=(float:min, float:max), \
    output_states=RESULTS,       \
    params=None,                 \
    name=None,                   \
    prefs=None)

    Subclass of `ProcessingMechanism <ProcessingMechanism>` that performs a simple transform of its input.

    COMMENT:
        Description
        -----------
            TransferMechanism is a Subtype of the ProcessingMechanism Type of the Mechanism Category of the
                Component class
            It implements a Mechanism that transforms its input variable based on FUNCTION (default: Linear)

        Class attributes
        ----------------
            + componentType (str): TransferMechanism
            + classPreference (PreferenceSet): Transfer_PreferenceSet, instantiated in __init__()
            + classPreferenceLevel (PreferenceLevel): PreferenceLevel.SUBTYPE
            + ClassDefaults.variable (value):  Transfer_DEFAULT_BIAS

        Class methods
        -------------
            None

        MechanismRegistry
        -----------------
            All instances of TransferMechanism are registered in MechanismRegistry, which maintains an
              entry for the subclass, a count for all instances of it, and a dictionary of those instances
    COMMENT

    Arguments
    ---------

    default_variable : number, list or np.ndarray : default Transfer_DEFAULT_BIAS
        specifies the input to the Mechanism to use if none is provided in a call to its
        `execute <Mechanism_Base.execute>` or `run <Mechanism_Base.run>` method;
        also serves as a template to specify the length of `variable <TransferMechanism.variable>` for
        `function <TransferMechanism.function>`, and the `primary outputState <OutputState_Primary>`
        of the Mechanism.

    size : int, list or np.ndarray of ints
        specifies default_variable as array(s) of zeros if **default_variable** is not passed as an argument;
        if **default_variable** is specified, it takes precedence over the specification of **size**.
        As an example, the following mechanisms are equivalent::
            T1 = TransferMechanism(size = [3, 2])
            T2 = TransferMechanism(default_variable = [[0, 0, 0], [0, 0]])

    input_states : str, list or np.ndarray
        specifies the InputStates for the TransferMechanism; by default, a single InputState is created using the
        value of default_variable as its `variable <InputState.variable>`;  if more than one is specified, the number
        and, if specified, their values must be compatible with any specifications in **default_variable** or
        **size** (see `Mechanism_InputStates`);  see `input_states <TransferMechanism.input_states>` for additional
        details.

    function : TransferFunction : default Linear
        specifies the function used to transform the input;  can be `Linear`, `Logistic`, `Exponential`,
        or a custom function.

    initial_value :  value, list or np.ndarray : default Transfer_DEFAULT_BIAS
        specifies the starting value for time-averaged input (only relevant if `integrator_mode
        <TransferMechanism.integrator_mode>` is True).
        COMMENT:
            Transfer_DEFAULT_BIAS SHOULD RESOLVE TO A VALUE
        COMMENT

    noise : float or function : default 0.0
        a stochastically-sampled value added to the result of the `function <TransferMechanism.function>`:
        if it is a float, it must be in the interval [0,1] and is used to scale the variance of a zero-mean Gaussian;
        if it is a function, it must return a scalar value.
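
        For example, a function such as ``np.random.normal`` (which returns a scalar sample when called with
        no arguments) can be specified;  a new value is then sampled independently for each element of the
        input on every execution::

            T = TransferMechanism(size=2, noise=np.random.normal)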

    time_constant : float : default 1.0
        the time constant for exponential time averaging of input when the Mechanism is executed with `integrator_mode`
        set to True::

         result = (time_constant * current input) + ((1-time_constant) * result on previous time_step)
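
        For example, with ``time_constant = 0.5``, a current input of 1.0 and a previous result of 0.0 give
        a new result of (0.5 * 1.0) + (0.5 * 0.0) = 0.5;  a second identical input then gives 0.75.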

    clip : Optional[Tuple[float, float]]
        specifies the allowable range for the result of `function <TransferMechanism.function>`:
        the first item specifies the minimum allowable value of the result, and the second its maximum allowable value;
        any element of the result that exceeds the specified minimum or maximum value is set to the value of
        `clip <TransferMechanism.clip>` that it exceeds.
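
        For example, with ``clip = (0, 2)``, a result of [-1, 0.5, 3] becomes [0, 0.5, 2].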

    output_states : str, list or np.ndarray : default RESULTS
        specifies the OutputStates for the TransferMechanism; by default, one is created for each InputState
        specified in **input_states**;  see `note <TransferMechanism_OutputStates_Note>`, and `output_states
        <TransferMechanism.output_states>` for additional details).

    params : Dict[param keyword, param value] : default None
        a `parameter dictionary <ParameterState_Specification>` that can be used to specify the parameters for
        the Mechanism, its `function <Mechanism_Base.function>`, and/or a custom function and its parameters.  Values
        specified for parameters in the dictionary override any assigned to those parameters in arguments of the
        constructor.

    name : str : default see `name <TransferMechanism.name>`
        specifies the name of the TransferMechanism.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the TransferMechanism; see `prefs <TransferMechanism.prefs>` for details.

    context : str : default componentType+INITIALIZING
        string used for contextualization of instantiation, hierarchical calls, executions, etc.

    Returns
    -------
    instance of TransferMechanism : TransferMechanism


    Attributes
    ----------

    variable : value
        the input to Mechanism's `function <TransferMechanism.function>`.
        COMMENT:
            :py:data:`Transfer_DEFAULT_BIAS <LINK->SHOULD RESOLVE TO VALUE>`
        COMMENT

    input_states : *ContentAddressableList[InputState]*
        list of Mechanism's `InputStates <InputStates>` (see `TransferMechanism_InputStates` for additional details).

    function : Function
        the Function used to transform the input.

    COMMENT:
       THE FOLLOWING IS THE CURRENT ASSIGNMENT
    COMMENT
    initial_value :  value, list or np.ndarray : Transfer_DEFAULT_BIAS
        specifies the starting value for time-averaged input (only relevant if `integrator_mode
        <TransferMechanism.integrator_mode>` is True and `time_constant <TransferMechanism.time_constant>` is not 1.0).
        COMMENT:
            Transfer_DEFAULT_BIAS SHOULD RESOLVE TO A VALUE
        COMMENT

    noise : float or function
        a stochastically-sampled value added to the output of the `function <TransferMechanism.function>`:
        if it is a float, it must be in the interval [0,1] and is used to scale the variance of a zero-mean Gaussian;
        if it is a function, it must return a scalar value.

    time_constant : float
        the time constant for exponential time averaging of input when the Mechanism is executed with `integrator_mode`
        set to True::

          result = (time_constant * current input) + ( (1-time_constant) * result on previous time_step)

    integrator_mode : bool
        when set to True, the Mechanism time averages its input according to an exponentially weighted moving average
        (see `time_constant <TransferMechanism.time_constant>`).

    clip : Optional[Tuple[float, float]]
        determines the allowable range of the result: the first value specifies the minimum allowable value
        and the second the maximum allowable value;  any element of the result that exceeds minimum or maximum
        is set to the value of `clip <TransferMechanism.clip>` it exceeds.  If `function <TransferMechanism.function>`
        is `Logistic`, `clip <TransferMechanism.clip>` is set by default to (0,1).

    value : 2d np.array [array(float64)]
        result of executing `function <TransferMechanism.function>`.

    previous_value : float
        the `value <TransferMechanism.value>` on the previous execution of the Mechanism.

    delta : float
        the change in `value <TransferMechanism.value>` from the previous execution of the Mechanism
        (i.e., `value <TransferMechanism.value>` - `previous_value <TransferMechanism.previous_value>`).

    output_states : *ContentAddressableList[OutputState]*
        list of Mechanism's `OutputStates <OutputStates>`; by default there is one OutputState for each InputState,
        with the base name `RESULT` (see `TransferMechanism_OutputStates` for additional details).

    output_values : List[array(float64)]
        each item is the `value <OutputState.value>` of the corresponding OutputState in `output_states
        <TransferMechanism.output_states>`.  The default is a single item containing the result of the
        TransferMechanism's `function <TransferMechanism.function>`;  additional
        ones may be included, based on the specifications made in the
        **output_states** argument of the Mechanism's constructor (see `TransferMechanism Standard OutputStates
        <TransferMechanism_Standard_OutputStates>`).

    name : str
        the name of the TransferMechanism; if it is not specified in the **name** argument of the constructor, a
        default is assigned by MechanismRegistry (see `Naming` for conventions used for default and duplicate names).

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the TransferMechanism; if it is not specified in the **prefs** argument of the 
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet 
        <LINK>` for details).

    """

    componentType = TRANSFER_MECHANISM

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName:
        'TransferCustomClassPreferences',
        kpReportOutputPref:
        PreferenceEntry(False, PreferenceLevel.INSTANCE),
        kpRuntimeParamStickyAssignmentPref:
        PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    # TransferMechanism parameter and control signal assignments):
    paramClassDefaults = ProcessingMechanism_Base.paramClassDefaults.copy()
    paramClassDefaults.update({NOISE: None})

    standard_output_states = standard_output_states.copy()

    class ClassDefaults(ProcessingMechanism_Base.ClassDefaults):
        variable = [[0]]

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(
                     tc.any(Iterable, Mechanism, OutputState,
                            InputState)) = None,
                 function=Linear,
                 initial_value=None,
                 noise=0.0,
                 time_constant=1.0,
                 integrator_mode=False,
                 clip=None,
                 output_states: tc.optional(tc.any(str, Iterable)) = RESULTS,
                 time_scale=TimeScale.TRIAL,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=componentType + INITIALIZING):
        """Assign type-level preferences and call super.__init__
        """

        # Default output_states is specified in constructor as a string rather than a list
        # to avoid "gotcha" associated with mutable default arguments
        # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
        if output_states is None or output_states is RESULTS:
            output_states = [RESULTS]

        params = self._assign_args_to_param_dicts(
            function=function,
            initial_value=initial_value,
            input_states=input_states,
            output_states=output_states,
            noise=noise,
            time_constant=time_constant,
            integrator_mode=integrator_mode,
            time_scale=time_scale,
            clip=clip,
            params=params)

        self.integrator_function = None

        if not isinstance(self.standard_output_states, StandardOutputStates):
            self.standard_output_states = StandardOutputStates(
                self, self.standard_output_states, indices=PRIMARY)

        super(TransferMechanism, self).__init__(
            variable=default_variable,
            size=size,
            params=params,
            name=name,
            prefs=prefs,
            context=self,
            input_states=input_states,
        )

    def _validate_params(self, request_set, target_set=None, context=None):
        """Validate FUNCTION and Mechanism params

        """

        super()._validate_params(request_set=request_set,
                                 target_set=target_set,
                                 context=context)

        # Validate FUNCTION
        if FUNCTION in target_set:
            transfer_function = target_set[FUNCTION]
            # FUNCTION is a Function
            if isinstance(transfer_function, Component):
                transfer_function_class = transfer_function.__class__
                transfer_function_name = transfer_function.__class__.__name__
            # FUNCTION is a function or method
            elif isinstance(transfer_function, (function_type, method_type)):
                transfer_function_class = transfer_function.__self__.__class__
                transfer_function_name = transfer_function.__self__.__class__.__name__
            # FUNCTION is a class
            elif inspect.isclass(transfer_function):
                transfer_function_class = transfer_function
                transfer_function_name = transfer_function.__name__

            if transfer_function_class.componentType not in {TRANSFER_FUNCTION_TYPE, NORMALIZING_FUNCTION_TYPE}:
                raise TransferError(
                    "Function {} specified as FUNCTION param of {} must be a {}"
                    .format(transfer_function_name, self.name,
                            TRANSFER_FUNCTION_TYPE))

        # Validate INITIAL_VALUE
        if INITIAL_VALUE in target_set:
            initial_value = target_set[INITIAL_VALUE]
            if initial_value is not None:
                if not iscompatible(initial_value,
                                    self.instance_defaults.variable):
                    raise TransferError(
                        "The format of the initial_value parameter for {} ({}) must match its input ({})"
                        .format(
                            append_type_to_name(self),
                            initial_value,
                            self.instance_defaults.variable[0],
                        ))

        # FIX: SHOULD THIS (AND TIME_CONSTANT) JUST BE VALIDATED BY INTEGRATOR FUNCTION NOW THAT THEY ARE PROPERTIES??
        # Validate NOISE:
        if NOISE in target_set:
            self._validate_noise(target_set[NOISE],
                                 self.instance_defaults.variable)
        # Validate TIME_CONSTANT:
        if TIME_CONSTANT in target_set:
            time_constant = target_set[TIME_CONSTANT]
            if time_constant is not None and not (isinstance(time_constant, float)
                                                  and 0 <= time_constant <= 1):
                raise TransferError(
                    "time_constant parameter ({}) for {} must be a float between 0 and 1"
                    .format(time_constant, self.name))

        # Validate RANGE:
        if CLIP in target_set:
            clip = target_set[CLIP]
            if clip:
                if not (isinstance(clip, tuple) and len(clip) == 2
                        and all(isinstance(i, numbers.Number) for i in clip)):
                    raise TransferError(
                        "clip parameter ({}) for {} must be a tuple with two numbers"
                        .format(clip, self.name))
                if not clip[0] < clip[1]:
                    raise TransferError(
                        "The first item of the clip parameter ({}) for {} must be less than the second"
                        .format(clip, self.name))

        # self.integrator_function = Integrator(
        #     # default_variable=self.default_variable,
        #                                       initializer = self.instance_defaults.variable,
        #                                       noise = self.noise,
        #                                       rate = self.time_constant,
        #                                       integration_type= ADAPTIVE)

    def _validate_noise(self, noise, var):
        # Noise is a list or array
        if isinstance(noise, (np.ndarray, list)):
            if len(noise) == 1:
                pass
            # Variable is a list/array
            elif not iscompatible(np.atleast_2d(noise),
                                  var) and len(noise) > 1:
                raise MechanismError(
                    "Noise parameter ({}) does not match default variable ({}). Noise parameter of {} must be specified"
                    " as a float, a function, or an array of the appropriate shape ({})."
                    .format(noise, self.instance_defaults.variable, self.name,
                            np.shape(np.array(var))))
            else:
                for noise_item in noise:
                    if not isinstance(
                            noise_item,
                        (float, int)) and not callable(noise_item):
                        raise MechanismError(
                            "The elements of a noise list or array must be floats or functions. {} is not a valid noise"
                            " element for {}".format(noise_item, self.name))

        elif _is_control_spec(noise):
            pass

        # Otherwise, must be a float, int or function
        elif not isinstance(noise, (float, int)) and not callable(noise):
            raise MechanismError(
                "Noise parameter ({}) for {} must be a float, "
                "function, or array/list of these.".format(noise, self.name))

    def _try_execute_param(self, param, var):

        # param is a list; if any element is callable, execute it
        if isinstance(param, (np.ndarray, list)):
            # NOTE: np.atleast_2d will cause problems if the param has "rows" of different lengths
            param = np.atleast_2d(param)
            for i in range(len(param)):
                for j in range(len(param[i])):
                    if callable(param[i][j]):
                        param[i][j] = param[i][j]()

        # param is one function
        elif callable(param):
            # NOTE: np.atleast_2d will cause problems if the param has "rows" of different lengths
            new_param = []
            for row in np.atleast_2d(var):
                new_row = []
                for item in row:
                    new_row.append(param())
                new_param.append(new_row)
            param = new_param

        return param
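
    # Illustrative note (not part of the original source): e.g., called with param=np.random.normal and
    # var=[[0, 0]], this method returns a 1x2 nested list in which each element is an independent sample,
    # because the callable is invoked once per element of var.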

    def _instantiate_parameter_states(self, context=None):

        from psyneulink.components.functions.function import Logistic
        # If function is a logistic, and clip has not been specified, bound it between 0 and 1
        if ((isinstance(self.function, Logistic) or
             (inspect.isclass(self.function)
              and issubclass(self.function, Logistic))) and self.clip is None):
            self.clip = (0, 1)

        super()._instantiate_parameter_states(context=context)

    def _instantiate_attributes_before_function(self, context=None):

        super()._instantiate_attributes_before_function(context=context)

        if self.initial_value is None:
            self.initial_value = self.instance_defaults.variable

    def _instantiate_output_states(self, context=None):
        # If user specified more than one item for variable, but did not specify any custom OutputStates
        # then assign one OutputState (with the default name, indexed by the number of them) per item of variable
        if len(self.variable) > 1 and len(
                self.output_states) == 1 and self.output_states[0] == RESULTS:
            self.output_states = []
            for i, item in enumerate(self.variable):
                self.output_states.append({NAME: RESULT, INDEX: i})
        super()._instantiate_output_states(context=context)
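
    # Illustrative note (not part of the original source): e.g., a TransferMechanism constructed with
    # default_variable=[[0, 0], [0]] and no custom output_states gets one RESULT OutputState per item of
    # variable (INDEX 0 and INDEX 1), per the loop above.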

    def _execute(self,
                 variable=None,
                 runtime_params=None,
                 clock=CentralClock,
                 time_scale=TimeScale.TRIAL,
                 context=None):
        """Execute TransferMechanism function and return transform of input

        Execute TransferMechanism function on input, and assign to output_values:
            - Activation value for all units
            - Mean of the activation values across units
            - Variance of the activation values across units
        Return:
            value of input transformed by TransferMechanism function in outputState[TransferOutput.RESULT].value
            mean of items in RESULT outputState[TransferOutput.MEAN].value
            variance of items in RESULT outputState[TransferOutput.VARIANCE].value

        Arguments:

        # CONFIRM:
        variable (float): set to self.value (= self.input_value)
        - params (dict):  runtime_params passed from Mechanism, used as one-time value for current execution:
            + NOISE (float)
            + TIME_CONSTANT (float)
            + RANGE ([float, float])
        - context (str)

        Returns the following values in self.value (2D np.array) and in
            the value of the corresponding outputState in the self.output_states list:
            - activation value (float)
            - mean activation value (float)
            - variance of activation values (float)

        :param self:
        :param variable (float)
        :param params: (dict)
        :param context: (str)
        :rtype self.outputState.value: (number)
        """

        # FIX: ??CALL check_args()??

        # FIX: IS THIS CORRECT?  SHOULD THIS BE SET TO INITIAL_VALUE
        # FIX:     WHICH SHOULD BE DEFAULTED TO 0.0??
        # Use self.instance_defaults.variable to initialize state of input

        # FIX: NEED TO GET THIS TO WORK WITH CALL TO METHOD:
        integrator_mode = self.integrator_mode

        #region ASSIGN PARAMETER VALUES

        time_constant = self.time_constant
        clip = self.clip
        noise = self.noise
        #endregion

        #region EXECUTE TransferMechanism FUNCTION ---------------------------------------------------------------------

        # FIX: NOT UPDATING self.previous_input CORRECTLY
        # FIX: SHOULD UPDATE PARAMS PASSED TO integrator_function WITH ANY RUNTIME PARAMS THAT ARE RELEVANT TO IT

        # Update according to time-scale of integration
        if integrator_mode:
            # if time_scale is TimeScale.TIME_STEP:

            if not self.integrator_function:

                self.integrator_function = AdaptiveIntegrator(
                    variable,
                    initializer=self.initial_value,
                    noise=self.noise,
                    rate=self.time_constant,
                    owner=self)

            current_input = self.integrator_function.execute(
                variable,
                # Should we handle runtime params?
                params={
                    INITIALIZER: self.initial_value,
                    NOISE: self.noise,
                    RATE: self.time_constant
                },
                context=context)
        else:
            # elif time_scale is TimeScale.TRIAL:
            noise = self._try_execute_param(self.noise, variable)
            # formerly: current_input = self.input_state.value + noise
            # (MODIFIED 7/13/17 CW) this if/else below is hacky: just allows a nicer error message
            # when the input is given as a string.
            if (np.array(noise) != 0).any():
                current_input = variable + noise
            else:
                current_input = variable

        if isinstance(self.function_object, TransferFunction):

            outputs = self.function(variable=current_input,
                                    params=runtime_params)
            # if clip is not None:
            #     print(clip)
            #     minCapIndices = np.where(outputs < clip[0])
            #     print(minCapIndices)
            #     maxCapIndices = np.where(outputs > clip[1])
            #     print(maxCapIndices)
            #     outputs[minCapIndices] = np.min(clip)
            #     outputs[maxCapIndices] = np.max(clip)
        else:
            # Apply TransferMechanism's function to each input state separately
            outputs = []
            for elem in current_input:
                output_item = self.function(variable=elem,
                                            params=runtime_params)
                # if clip is not None:
                #     minCapIndices = np.where(output_item < clip[0])
                #     maxCapIndices = np.where(output_item > clip[1])
                #     output_item[minCapIndices] = np.min(clip)
                #     output_item[maxCapIndices] = np.max(clip)
                outputs.append(output_item)

        return outputs
        #endregion

    def _report_mechanism_execution(self, input, params, output):
        """Override super to report previous_input rather than input, and selected params
        """
        # KAM Changed 8/29/17 print_input = self.previous_input --> print_input = input
        # because self.previous_input is not a valid attrib of TransferMechanism

        print_input = input
        print_params = params.copy()
        # Only report time_constant if in TIME_STEP mode
        if params['time_scale'] is TimeScale.TRIAL:
            del print_params[TIME_CONSTANT]
        # Suppress reporting of clip (not currently used)
        del print_params[CLIP]

        super()._report_mechanism_execution(input_val=print_input,
                                            params=print_params)

    # def terminate_function(self, context=None):
    #     """Terminate the process
    #
    #     called by process.terminate() - MUST BE OVERRIDDEN BY SUBCLASS IMPLEMENTATION
    #     returns output
    #
    #     :rtype CurrentStateTuple(state, confidence, duration, controlModulatedParamValues)
    #     """
    #     # IMPLEMENTATION NOTE:  TBI when time_step is implemented for TransferMechanism
    #
    @property
    def clip(self):
        return self._clip

    @clip.setter
    def clip(self, value):
        self._clip = value

    # MODIFIED 4/17/17 NEW:
    @property
    def noise(self):
        return self._noise

    @noise.setter
    def noise(self, value):
        self._noise = value

    @property
    def time_constant(self):
        return self._time_constant

    @time_constant.setter
    def time_constant(self, value):
        self._time_constant = value

    # # MODIFIED 4/17/17 END

    @property
    def previous_value(self):
        if self.integrator_function:
            return self.integrator_function.previous_value
        return None

    @property
    def delta(self):
        if self.integrator_function:
            return self.value - self.integrator_function.previous_value
        return None
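
A minimal sketch of the time-averaging that the integrator_mode branch of _execute above delegates to
AdaptiveIntegrator. The update rule and the parameter name rate (standing in for time_constant) are assumptions
made for illustration, not taken from AdaptiveIntegrator's source:

import numpy as np

def adaptive_update(previous_value, variable, rate=0.5, noise=0.0):
    # Assumed exponential time-average of the input:
    # value = (1 - rate) * previous_value + rate * variable + noise
    return (1 - rate) * np.asarray(previous_value, dtype=float) \
           + rate * np.asarray(variable, dtype=float) + noise

# Repeated presentations of the same input converge toward it:
state = np.zeros(3)
for _ in range(5):
    state = adaptive_update(state, [1.0, 1.0, 1.0], rate=0.5)
print(state)  # approximately [0.969 0.969 0.969]
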
Example n. 14
class LeabraFunction(Function_Base):
    """
    LeabraFunction(             \
        default_variable=None,  \
        network=None,           \
        params=None,            \
        owner=None,             \
        prefs=None)

    .. _LeabraFunction:

    LeabraFunction is a custom Function that lives inside a LeabraMechanism. It transforms its variable by passing it
    as input to the leabra network that the LeabraFunction wraps, and returns that network's output.

    Arguments
    ---------

    default_variable : number or np.array : default np.zeros() (array of zeros)
        specifies a template for the input to the leabra network.

    network : leabra.Network
        specifies the leabra network to be used.

    params : Dict[param keyword: param value] : default None
        a `parameter dictionary <ParameterState_Specification>` that specifies the parameters for the
        function.  Values specified for parameters in the dictionary override any assigned to those parameters in
        arguments of the constructor.

    owner : Component
        `component <Component>` to which to assign the Function.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the LeabraMechanism; see `prefs <LeabraMechanism.prefs>` for details.


    Attributes
    ----------

    variable : number or np.array
        contains value to be transformed.

    network : leabra.Network
        the leabra network that is being used.

    owner : Mechanism
        `component <Component>` to which the Function has been assigned.

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the LeabraMechanism; if it is not specified in the **prefs** argument of the
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet
        <LINK>` for details).

    """

    componentType = LEABRA_FUNCTION_TYPE
    componentName = LEABRA_FUNCTION

    multiplicative_param = NotImplemented
    additive_param = NotImplemented  # very hacky

    classPreferences = {
        kwPreferenceSetName: 'LeabraFunctionClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE),
        kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    paramClassDefaults = Function_Base.paramClassDefaults.copy()

    class ClassDefaults(Function_Base.ClassDefaults):
        variable = [[0], [0]]

    def __init__(self,
                 default_variable=None,
                 network=None,
                 params=None,
                 owner=None,
                 prefs=None):

        if not leabra_available:
            raise LeabraError('leabra python module is not installed. Please install it from '
                              'https://github.com/benureau/leabra')

        if network is None:
            raise LeabraError('network was None. Cannot create function for Leabra Mechanism if network is not specified.')

        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(network=network,
                                                  params=params)

        if default_variable is None:
            input_size = len(self.network.layers[0].units)
            output_size = len(self.network.layers[-1].units)
            default_variable = [np.zeros(input_size), np.zeros(output_size)]

        super().__init__(default_variable=default_variable,
                         params=params,
                         owner=owner,
                         prefs=prefs,
                         context=ContextFlags.CONSTRUCTOR)

    def _validate_variable(self, variable, context=None):
        if not isinstance(variable, (list, np.ndarray, numbers.Number)):
            raise LeabraError("Input Error: the input variable ({}) was of type {}, but instead should be a list, "
                              "numpy array, or number.".format(variable, type(variable)))

        input_size = len(self.network.layers[0].units)
        output_size = len(self.network.layers[-1].units)
        if (not hasattr(self, "owner")) or (not hasattr(self.owner, "training_flag")) or self.owner.training_flag is False:
            if len(convert_to_2d_input(variable)[0]) != input_size:
                # convert_to_2d_input(variable)[0] is used in case variable is a 2D array rather than a vector
                raise LeabraError("Input Error: the input was {}, which was of an incompatible length with the "
                                  "input_size, which should be {}.".format(convert_to_2d_input(variable)[0], input_size))
        else:
            if len(convert_to_2d_input(variable)[0]) != input_size or len(convert_to_2d_input(variable)[1]) != output_size:
                raise LeabraError("Input Error: the input variable was {}, which was of an incompatible length with "
                                  "the input_size or output_size, which should be {} and {} respectively.".
                                  format(variable, input_size, output_size))
        return variable

    def _validate_params(self, request_set, target_set=None, context=None):
        if not isinstance(request_set[NETWORK], leabra.Network):
            raise LeabraError("Error: the network given ({}) was of type {}, but instead must be a leabra Network.".
                              format(request_set[NETWORK], type(request_set[NETWORK])))
        super()._validate_params(request_set, target_set, context)

    def function(self,
                 variable=None,
                 params=None,
                 context=None):
        variable = self._update_variable(self._check_args(variable=variable, params=params, context=context))

        # HACK: otherwise the INITIALIZING function executions impact the state of the leabra network
        if self.context.initialization_status == ContextFlags.INITIALIZING:
            output_size = len(self.network.layers[-1].units)
            return np.zeros(output_size)

        if (not hasattr(self, "owner")) or (not hasattr(self.owner, "training_flag")) or self.owner.training_flag is False:
            if isinstance(variable[0], (list, np.ndarray)):
                variable = variable[0]
            return run_leabra_network(self.network, input_pattern=variable)

        else:
            # variable = convert_to_2d_input(variable)  # FIX: buggy, doesn't handle lists well
            if len(variable) != 2:
                raise LeabraError("Input Error: the input given ({}) for training was not the right format: the input "
                                  "should be a 2D array containing two vectors, corresponding to the input and the "
                                  "training target.".format(variable))
            if len(variable[0]) != len(self.network.layers[0].units) or len(variable[1]) != len(self.network.layers[-1].units):
                raise LeabraError("Input Error: the input given ({}) was not the right format: it should be a 2D array "
                                  "containing two vectors, corresponding to the input (which should be length {}) and "
                                  "the training target (which should be length {})".
                                  format(variable, len(self.network.layers[0].units), len(self.network.layers[-1].units)))
            return train_leabra_network(self.network, input_pattern=variable[0], output_pattern=variable[1])
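
A hedged usage sketch for the two branches of LeabraFunction.function above. It assumes the leabra package is
installed and that the build_leabra_network helper (called by the LeabraMechanism constructor in a later example)
accepts its arguments in the order shown there; in run mode the variable is a single input vector, while in
training mode it is an [input, target] pair:

import numpy as np

# Sketch only: a small network with 4 input units, 2 output units, one hidden
# layer of 4 units, learning off, and quarter_size of 50.
net = build_leabra_network(4, 2, 1, 4, False, 50)
f = LeabraFunction(network=net)

# Run mode: the variable is a single input vector of length input_size.
output = f.function(variable=np.zeros(4))

# Training mode (when the owning mechanism's training_flag is True), the variable
# would instead be an [input, target] pair, e.g.:
# f.function(variable=[np.zeros(4), np.zeros(2)])
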
Example n. 15
kwTypeDefaultPreferences = 'TypeDefaultPreferences'
kwSubtypeDefaultPreferences = 'SubtypeDefaultPreferences'
kwInstanceDefaultPreferences = 'InstanceDefaultPreferences'

# Level default preferences dicts:

ComponentPreferenceSetPrefs = {
    kpVerbosePref, kpParamValidationPref, kpReportOutputPref, kpLogPref,
    kpRuntimeParamModulationPref, kpRuntimeParamStickyAssignmentPref
}

SystemDefaultPreferencesDict = {
    kwPreferenceSetName:
    kwSystemDefaultPreferences,
    kpVerbosePref:
    PreferenceEntry(False, PreferenceLevel.SYSTEM),
    kpParamValidationPref:
    PreferenceEntry(True, PreferenceLevel.SYSTEM),
    kpReportOutputPref:
    PreferenceEntry(False, PreferenceLevel.SYSTEM),
    kpLogPref:
    PreferenceEntry(LogLevel.OFF, PreferenceLevel.CATEGORY),
    kpRuntimeParamModulationPref:
    PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.SYSTEM),
    kpRuntimeParamStickyAssignmentPref:
    PreferenceEntry(False, PreferenceLevel.SYSTEM)
}

CategoryDefaultPreferencesDict = {
    kwPreferenceSetName:
    kwCategoryDefaultPreferences,
Example n. 16
class LeabraMechanism(ProcessingMechanism_Base):
    """
    LeabraMechanism(                \
    leabra_net=None,                \
    input_size=1,                   \
    output_size=1,                  \
    hidden_layers=0,                \
    hidden_sizes=None,              \
    training_flag=None,             \
    quarter_size=50,                \
    params=None,                    \
    name=None,                      \
    prefs=None)

    Subclass of `ProcessingMechanism` that is a wrapper for a Leabra network in PsyNeuLink.

    Arguments
    ---------

    leabra_net : Optional[leabra.Network]
        a network object from the leabra package. If specified, the LeabraMechanism's network becomes **leabra_net**,
        and the other arguments that specify the network are ignored (**input_size**, **output_size**,
        **hidden_layers**, **hidden_sizes**).

    input_size : int : default 1
        an integer specifying how many units are in (the size of) the first layer (input) of the leabra network.

    output_size : int : default 1
        an integer specifying how many units are in (the size of) the final layer (output) of the leabra network.

    hidden_layers : int : default 0
        an integer specifying how many hidden layers are in the leabra network.

    hidden_sizes : int or List[int] : default input_size
        if specified, this should be a list of integers, specifying the size of each hidden layer. If **hidden_sizes**
        is a list, the number of integers in **hidden_sizes** should be equal to the number of hidden layers. If not
        specified, hidden layers will default to the same size as the input layer. If hidden_sizes is a single integer,
        then all hidden layers are of that size.

    training_flag : boolean : default None
        a boolean specifying whether the leabra network should be learning. If True, the leabra network will adjust
        its weights using the "leabra" algorithm, based on the training pattern (which is read from its second output
        state). The `training_flag` attribute can be changed after initialization, causing the leabra network to
        start/stop learning. If None, `training_flag` will default to False if **leabra_net** argument is not provided.
        If **leabra_net** argument is provided and `training_flag` is None, then the existing learning rules of the
        **leabra_net** will be preserved.

    quarter_size : int : default 50
        an integer specifying how many times the Leabra network cycles each time it is run. Lower values of
        quarter_size result in shorter execution times, though very low values may cause slight fluctuations in output.
        Lower values of quarter_size also effectively reduce the magnitude of learning weight changes during
        a given trial.

    params : Dict[param keyword: param value] : default None
        a `parameter dictionary <ParameterState_Specification>` that can be used to specify the parameters for
        the mechanism, its function, and/or a custom function and its parameters.  Values specified for parameters in
        the dictionary override any assigned to those parameters in arguments of the constructor.

    name : str : default LeabraMechanism-<index>
        a string used for the name of the mechanism.
        If is not specified, a default is assigned by `MechanismRegistry`
        (see :doc:`Registry <LINK>` for conventions used in naming, including for default and duplicate names).

    prefs : Optional[PreferenceSet or specification dict : Mechanism.classPreferences]
        the `PreferenceSet` for mechanism.
        If it is not specified, a default is assigned using `classPreferences` defined in __init__.py
        (see :doc:`PreferenceSet <LINK>` for details).

    context : str : default componentType+INITIALIZING
        string used for contextualization of instantiation, hierarchical calls, executions, etc.

    Attributes
    ----------

    variable : value
        the input to this Mechanism's `function <LeabraMechanism.function>`.

    function : LeabraFunction
        the function that wraps and executes the leabra mechanism

    value : 2d np.array [array(float64)]
        result of executing `function <LeabraMechanism.function>`.

    input_size : int : default 1
        an integer specifying how many units are in (the size of) the first layer (input) of the leabra network.

    output_size : int : default 1
        an integer specifying how many units are in (the size of) the final layer (output) of the leabra network.

    hidden_layers : int : default 0
        an integer specifying how many hidden layers are in the leabra network.

    hidden_sizes : int or List[int] : default input_size
        an integer or list of integers, specifying the size of each hidden layer.

    training_flag : boolean
        a boolean specifying whether the leabra network should be learning. If True, the leabra network will adjust
        its weights using the "leabra" algorithm, based on the training pattern (which is read from its second output
        state). The `training_flag` attribute can be changed after initialization, causing the leabra network to
        start/stop learning.

    quarter_size : int : default 50
        an integer specifying how many times the Leabra network cycles each time it is run. Lower values of
        quarter_size result in shorter execution times, though very low values may cause slight fluctuations in output.
        Lower values of quarter_size also effectively reduce the magnitude of learning weight changes during
        a given trial.

    network : leabra.Network
        the leabra.Network object which is executed by the LeabraMechanism. For more info about leabra Networks,
        please see the `leabra package <https://github.com/benureau/leabra>`_ on Github.

    output_states : *ContentAddressableList[OutputState]* : default [`RESULT <TRANSFER_MECHANISM_RESULT>`]
        list of Mechanism's `OutputStates <OutputStates>`.  By default there is a single OutputState,
        `RESULT <TRANSFER_MECHANISM_RESULT>`, that contains the result of a call to the Mechanism's
        `function <LeabraMechanism.function>`;  additional `standard <TransferMechanism_Standard_OutputStates>`
        and/or custom OutputStates may be included, based on the specifications made in the **output_states** argument
        of the Mechanism's constructor.

    output_values : List[array(float64)]
        each item is the `value <OutputState.value>` of the corresponding OutputState in `output_states
        <LeabraMechanism.output_states>`.  The default is a single item containing the result of the
        TransferMechanism's `function <LeabraMechanism.function>`;  additional
        ones may be included, based on the specifications made in the
        **output_states** argument of the Mechanism's constructor (see `TransferMechanism Standard OutputStates
        <TransferMechanism_Standard_OutputStates>`).

    name : str : default LeabraMechanism-<index>
        the name of the Mechanism.
        Specified in the **name** argument of the constructor for the Projection;
        if not specified, a default is assigned by `MechanismRegistry`
        (see :doc:`Registry <LINK>` for conventions used in naming, including for default and duplicate names).

    prefs : PreferenceSet or specification dict : Mechanism.classPreferences
        the `PreferenceSet` for Mechanism.
        Specified in the **prefs** argument of the constructor for the Mechanism;
        if it is not specified, a default is assigned using `classPreferences` defined in ``__init__.py``
        (see :doc:`PreferenceSet <LINK>` for details).

    Returns
    -------
    instance of LeabraMechanism : LeabraMechanism
    """

    componentType = LEABRA_MECHANISM

    is_self_learner = True  # CW 11/27/17: a flag; "True" if the mechanism self-learns. Declared in ProcessingMechanism

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName: 'TransferCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE),
        kpRuntimeParamStickyAssignmentPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    # LeabraMechanism parameter and control signal assignments):
    paramClassDefaults = Mechanism_Base.paramClassDefaults.copy()
    paramClassDefaults.update({FUNCTION: LeabraFunction,
                               INPUT_STATES: input_state_names,
                               OUTPUT_STATES: output_state_name})

    standard_output_states = standard_output_states.copy()

    def __init__(self,
                 leabra_net=None,
                 input_size=1,
                 output_size=1,
                 hidden_layers=0,
                 hidden_sizes=None,
                 training_flag=None,
                 quarter_size=50,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):
        if not leabra_available:
            raise LeabraError('leabra python module is not installed. Please install it from '
                              'https://github.com/benureau/leabra')

        if leabra_net is not None:
            leabra_network = leabra_net
            input_size = len(leabra_network.layers[0].units)
            output_size = len(leabra_network.layers[-1].units)
            hidden_layers = len(leabra_network.layers) - 2
            hidden_sizes = list(map(lambda x: len(x.units), leabra_network.layers))[1:-1]
            quarter_size = leabra_network.spec.quarter_size
            training_flag = infer_training_flag_from_network(leabra_network)
        else:
            if hidden_sizes is None:
                hidden_sizes = input_size
            if training_flag is None:
                training_flag = False
            leabra_network = build_leabra_network(input_size, output_size, hidden_layers, hidden_sizes,
                                                  training_flag, quarter_size)

        function = LeabraFunction(network=leabra_network)

        if not isinstance(self.standard_output_states, StandardOutputStates):
            self.standard_output_states = StandardOutputStates(self,
                                                               self.standard_output_states,
                                                               indices=PRIMARY)

        params = self._assign_args_to_param_dicts(function=function,
                                                  input_size=input_size,
                                                  output_size=output_size,
                                                  hidden_layers=hidden_layers,
                                                  hidden_sizes=hidden_sizes,
                                                  training_flag=training_flag,
                                                  quarter_size=quarter_size,
                                                  params=params)

        super().__init__(size=[input_size, output_size],
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=ContextFlags.CONSTRUCTOR)

    def _execute(
        self,
        variable=None,
        function_variable=None,
        runtime_params=None,
        time_scale=TimeScale.TRIAL,
        # ignore_execution_id=False,
        context=None
    ):

        if runtime_params:
            if "training_flag" in runtime_params.keys():
                self.training_flag = runtime_params["training_flag"]
                del runtime_params["training_flag"]

        return super()._execute(
            variable=variable,
            function_variable=function_variable,
            runtime_params=runtime_params,
            # ignore_execution_id=ignore_execution_id,
            context=context
        )

    @property
    def training_flag(self):
        return self._training_flag

    @training_flag.setter
    def training_flag(self, value):
        if self._training_flag is value:
            return
        set_training(self.function_object.network, value)
        self._training_flag = value

    @property
    def network(self):
        return self.function_object.network

    @network.setter
    def network(self, value):
        self.function_object.network = value
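
A hedged construction sketch based on the LeabraMechanism docstring above: build the mechanism from layer sizes,
execute it once, then toggle training_flag. It assumes the leabra package is installed and that, because the
mechanism has two input states, execute takes an [input, target] pair (the target being ignored while
training_flag is False):

import numpy as np

# Sketch only: 4 input units, 2 output units, one hidden layer of 4 units.
L = LeabraMechanism(input_size=4, output_size=2, hidden_layers=1,
                    hidden_sizes=4, training_flag=False, quarter_size=50)

# First item is the input pattern, second is the training target.
result = L.execute([np.zeros(4), np.zeros(2)])

# Turning learning on causes subsequent executions to adjust the network's weights.
L.training_flag = True
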
class ProcessingMechanism(ProcessingMechanism_Base):
    """
    ProcessingMechanism(            \
    default_variable=None,          \
    size=None,                      \
    function=Linear,                \
    params=None,                    \
    name=None,                      \
    prefs=None)

    Subclass of `ProcessingMechanism <ProcessingMechanism>` that does not have any specialized features.

    Arguments
    ---------

    default_variable : number, list or np.ndarray
        the input to the Mechanism to use if none is provided in a call to its
        `execute <Mechanism_Base.execute>` or `run <Mechanism_Base.run>` methods;
        also serves as a template to specify the length of `variable <ProcessingMechanism.variable>` for
        `function <ProcessingMechanism.function>`, and the `primary outputState <OutputState_Primary>` of the
        Mechanism.

    size : int, list or np.ndarray of ints
        specifies default_variable as array(s) of zeros if **default_variable** is not passed as an argument;
        if **default_variable** is specified, it takes precedence over the specification of **size**.
        As an example, the following mechanisms are equivalent::

            P1 = ProcessingMechanism(size = [3, 2])
            P2 = ProcessingMechanism(default_variable = [[0, 0, 0], [0, 0]])

    function : PsyNeuLink Function : default Linear
        specifies the function used to compute the output

    params : Dict[param keyword: param value] : default None
        a `parameter dictionary <ParameterState_Specification>` that can be used to specify the parameters for
        the Mechanism, parameters for its `function <ProcessingMechanism.function>`, and/or a custom function and its
        parameters.  Values specified for parameters in the dictionary override any assigned to those parameters in
        arguments of the constructor.

    name : str : default see `name <ProcessingMechanism.name>`
        specifies the name of the ProcessingMechanism.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the ProcessingMechanism; see `prefs <ProcessingMechanism.prefs>` for details.

    Attributes
    ----------
    variable : value: default
        the input to Mechanism's `function`.

    name : str
        the name of the ProcessingMechanism; if it is not specified in the **name** argument of the constructor, a
        default is assigned by MechanismRegistry (see `Naming` for conventions used for default and duplicate names).

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the ProcessingMechanism; if it is not specified in the **prefs** argument of the
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet
        <LINK>` for details).

    """

    componentType = PROCESSING_MECHANISM

    classPreferenceLevel = PreferenceLevel.TYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName: 'ProcessingMechanismCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    paramClassDefaults = ProcessingMechanism_Base.paramClassDefaults.copy()
    paramClassDefaults.update({OUTPUT_STATES: [PREDICTION_MECHANISM_OUTPUT]})

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(tc.any(list, dict)) = None,
                 output_states: tc.optional(tc.any(str, Iterable)) = None,
                 function=Linear,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):

        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(function=function,
                                                  input_states=input_states,
                                                  output_states=output_states,
                                                  params=params)

        super(ProcessingMechanism,
              self).__init__(default_variable=default_variable,
                             size=size,
                             input_states=input_states,
                             function=function,
                             output_states=output_states,
                             params=params,
                             name=name,
                             prefs=prefs,
                             context=ContextFlags.CONSTRUCTOR)
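
A hedged usage sketch for the generic ProcessingMechanism above, restating the size/default_variable equivalence
from its docstring; the Linear(slope=...) parameterization is assumed here for illustration:

import numpy as np

# Equivalent specifications, per the size docstring:
P1 = ProcessingMechanism(size=3)
P2 = ProcessingMechanism(default_variable=[[0, 0, 0]])

# A custom function; assuming Linear accepts a slope parameter that scales its input.
P3 = ProcessingMechanism(size=3, function=Linear(slope=2.0))
result = P3.execute([1.0, 2.0, 3.0])   # expected: approximately [[2., 4., 6.]]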