# Example #1
class EVCAuxiliaryFunction(Function_Base):
    """Base class for EVC auxiliary functions.

    Wraps the callable passed as **function** and hands it to
    `Function_Base` as the Component's `function`, keeping a direct
    reference to it in ``aux_function``.
    """
    componentType = kwEVCAuxFunctionType

    class Parameters(Function_Base.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <Function_Base.variable>`

                    :default value: None
                    :type:
                    :read only: True

        """
        variable = Parameter(None,
                             read_only=True,
                             pnl_internal=True,
                             constructor_argument='default_variable')

    classPreferences = {
        PREFERENCE_SET_NAME: 'ValueFunctionCustomClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE),
    }

    @tc.typecheck
    def __init__(self,
                 function,
                 variable=None,
                 params=None,
                 owner=None,
                 prefs: is_pref_set = None,
                 context=None):

        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(params=params)
        # Keep a direct handle on the wrapped function, in addition to
        # passing it to super() as the Component's function
        self.aux_function = function

        super().__init__(
            default_variable=variable,
            params=params,
            owner=owner,
            prefs=prefs,
            context=context,
            function=function,
        )
class CompositionInterfaceMechanism(ProcessingMechanism_Base):
    """
    CompositionInterfaceMechanism(  \
        function=Identity())

    Subclass of `ProcessingMechanism <ProcessingMechanism>` that acts as interface between a Composition and its
    inputs from and outputs to the environment or other Mechanisms (if it is a nested Composition).

    See `Mechanism <Mechanism_Class_Reference>` for arguments and additional attributes.

    Attributes
    ----------

    function : InterfaceFunction : default Identity
        the function used to transform the variable before assigning it to the Mechanism's OutputPort(s)

    """

    componentType = COMPOSITION_INTERFACE_MECHANISM
    outputPortTypes = [OutputPort, ControlSignal]

    classPreferenceLevel = PreferenceLevel.TYPE
    # These will override those specified in TYPE_DEFAULT_PREFERENCES
    classPreferences = {
        PREFERENCE_SET_NAME: 'CompositionInterfaceMechanismCustomClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE),
    }

    class Parameters(ProcessingMechanism_Base.Parameters):
        # Identity passes the input through unchanged; not stateful, not logged
        function = Parameter(Identity, stateful=False, loggable=False)

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_ports: tc.optional(tc.any(Iterable, Mechanism, OutputPort, InputPort)) = None,
                 function=None,
                 composition=None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):

        # Fall back on the class default only when neither a variable nor a size was given
        if default_variable is None and size is None:
            default_variable = self.class_defaults.variable

        self.composition = composition
        self.connected_to_composition = False

        super().__init__(default_variable=default_variable,
                         size=size,
                         input_ports=input_ports,
                         function=function,
                         params=params,
                         name=name,
                         prefs=prefs)
# Example #3
class EVCAuxiliaryFunction(Function_Base):
    """Base class for EVC auxiliary functions.

    Wraps the callable passed as **function** and hands it to
    `Function_Base` as the Component's `function`, keeping a direct
    reference to it in ``aux_function``.
    """
    componentType = kwEVCAuxFunctionType

    class Parameters(Function_Base.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <Function_Base.variable>`

                    :default value: None
                    :type:
                    :read only: True

        """
        # Declared as a Parameter (rather than a bare None) so the read-only
        # metadata documented above actually holds; this also matches the
        # other EVCAuxiliaryFunction definitions in this file.
        variable = Parameter(None,
                             read_only=True,
                             pnl_internal=True,
                             constructor_argument='default_variable')

    classPreferences = {
        kwPreferenceSetName: 'ValueFunctionCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE),
    }

    @tc.typecheck
    def __init__(self,
                 function,
                 variable=None,
                 params=None,
                 owner=None,
                 prefs: is_pref_set = None,
                 context=None):

        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(params=params)
        # Keep a direct handle on the wrapped function
        self.aux_function = function

        super().__init__(
            default_variable=variable,
            params=params,
            owner=owner,
            prefs=prefs,
            context=context,
            function=function,
        )
# Example #4
class EVCAuxiliaryFunction(Function_Base):
    """Base class for EVC auxiliary functions.

    Forwards the callable given as **function** to `Function_Base` as the
    Component's `function`, and keeps a reference to it in ``aux_function``.
    """
    componentType = kwEVCAuxFunctionType

    class Parameters(Function_Base.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <Function_Base.variable>`

                    :default value: None
                    :type:

        """
        variable = Parameter(None, pnl_internal=True, constructor_argument='default_variable')

    classPreferences = {
        PREFERENCE_SET_NAME: 'ValueFunctionCustomClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE),
    }

    @tc.typecheck
    def __init__(self,
                 function,
                 variable=None,
                 params=None,
                 owner=None,
                 prefs: is_pref_set = None,
                 context=None):

        # Collect constructor args into the params dict expected by Function_Base
        params = self._assign_args_to_param_dicts(params=params)
        # Keep a direct handle on the wrapped function
        self.aux_function = function

        super().__init__(
            default_variable=variable,
            params=params,
            owner=owner,
            prefs=prefs,
            context=context,
            function=function,
        )
class ComparatorMechanism(ObjectiveMechanism):
    """
    ComparatorMechanism(                                \
        sample,                                         \
        target,                                         \
        input_ports=[SAMPLE,TARGET]                     \
        function=LinearCombination(weights=[[-1],[1]],  \
        output_ports=OUTCOME)

    Subclass of `ObjectiveMechanism` that compares the values of two `OutputPorts <OutputPort>`.
    See `ObjectiveMechanism <ObjectiveMechanism_Class_Reference>` for additional arguments and attributes.


    Arguments
    ---------

    sample : OutputPort, Mechanism, value, or string
        specifies the value to compare with the `target` by the `function <ComparatorMechanism.function>`.

    target :  OutputPort, Mechanism, value, or string
        specifies the value with which the `sample` is compared by the `function <ComparatorMechanism.function>`.

    input_ports :  List[InputPort, value, str or dict] or Dict[] : default [SAMPLE, TARGET]
        specifies the names and/or formats to use for the values of the sample and target InputPorts;
        by default they are named *SAMPLE* and *TARGET*, and their formats match the value of the OutputPorts
        specified in the **sample** and **target** arguments, respectively (see `ComparatorMechanism_Structure`
        for additional details).

    function :  Function, function or method : default Distance(metric=DIFFERENCE)
        specifies the `function <Comparator.function>` used to compare the `sample` with the `target`.


    Attributes
    ----------

    COMMENT:
    default_variable : Optional[List[array] or 2d np.array]
    COMMENT

    sample : OutputPort
        determines the value to compare with the `target` by the `function <ComparatorMechanism.function>`.

    target : OutputPort
        determines the value with which `sample` is compared by the `function <ComparatorMechanism.function>`.

    input_ports : ContentAddressableList[InputPort, InputPort]
        contains the two InputPorts named, by default, *SAMPLE* and *TARGET*, each of which receives a
        `MappingProjection` from the OutputPorts referenced by the `sample` and `target` attributes
        (see `ComparatorMechanism_Structure` for additional details).

    function : CombinationFunction, function or method
        used to compare the `sample` with the `target`.  It can be any PsyNeuLink `CombinationFunction`,
        or a python function that takes a 2d array with two items and returns a 1d array of the same length
        as the two input items.

    output_port : OutputPort
        contains the `primary <OutputPort_Primary>` OutputPort of the ComparatorMechanism; the default is
        its *OUTCOME* OutputPort, the value of which is equal to the `value <ComparatorMechanism.value>`
        attribute of the ComparatorMechanism.

    output_ports : ContentAddressableList[OutputPort]
        contains, by default, only the *OUTCOME* (primary) OutputPort of the ComparatorMechanism.

    output_values : 2d np.array
        contains one item that is the value of the *OUTCOME* OutputPort.

    standard_output_ports : list[str]
        list of `Standard OutputPorts <OutputPort_Standard>` that includes the following in addition to the
        `standard_output_ports <ObjectiveMechanism.standard_output_ports>` of an `ObjectiveMechanism`:

        .. _COMPARATOR_MECHANISM_SSE

        *SSE*
            the value of the sum squared error of the Mechanism's function

        .. _COMPARATOR_MECHANISM_MSE

        *MSE*
            the value of the mean squared error of the Mechanism's function

    """
    componentType = COMPARATOR_MECHANISM

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    # These will override those specified in TYPE_DEFAULT_PREFERENCES
    classPreferences = {
        PREFERENCE_SET_NAME: 'ComparatorCustomClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE)}

    class Parameters(ObjectiveMechanism.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <Mechanism_Base.variable>`

                    :default value: numpy.array([[0], [0]])
                    :type: numpy.ndarray
                    :read only: True

                function
                    see `function <ComparatorMechanism.function>`

                    :default value: `LinearCombination`(weights=numpy.array([[-1], [ 1]]))
                    :type: `Function`

                sample
                    see `sample <ComparatorMechanism.sample>`

                    :default value: None
                    :type:

                target
                    see `target <ComparatorMechanism.target>`

                    :default value: None
                    :type:

        """
        # By default, ComparatorMechanism compares two 1D np.array input_ports
        variable = Parameter(np.array([[0], [0]]), read_only=True, pnl_internal=True, constructor_argument='default_variable')
        function = Parameter(LinearCombination(weights=[[-1], [1]]), stateful=False, loggable=False)
        sample = None
        target = None

        output_ports = Parameter(
            [OUTCOME],
            stateful=False,
            loggable=False,
            read_only=True,
            structural=True,
        )

    # ComparatorMechanism parameter and control signal assignments):
    paramClassDefaults = Mechanism_Base.paramClassDefaults.copy()

    standard_output_ports = ObjectiveMechanism.standard_output_ports.copy()
    standard_output_ports.extend([{NAME: SSE,
                                   FUNCTION: lambda x: np.sum(x * x)},
                                  {NAME: MSE,
                                   FUNCTION: lambda x: np.sum(x * x) / safe_len(x)}])
    standard_output_port_names = ObjectiveMechanism.standard_output_port_names.copy()
    standard_output_port_names.extend([SSE, MSE])

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 sample: tc.optional(tc.any(OutputPort, Mechanism_Base, dict, is_numeric, str))=None,
                 target: tc.optional(tc.any(OutputPort, Mechanism_Base, dict, is_numeric, str))=None,
                 function=LinearCombination(weights=[[-1], [1]]),
                 output_ports:tc.optional(tc.any(str, Iterable)) = None,
                 params=None,
                 name=None,
                 prefs:is_pref_set=None,
                 **kwargs
                 ):

        input_ports = kwargs.pop(INPUT_PORTS, {})
        if input_ports:
            input_ports = {INPUT_PORTS: input_ports}

        input_ports = self._merge_legacy_constructor_args(sample, target, default_variable, input_ports)

        # Default output_ports is specified in constructor as a tuple rather than a list
        # to avoid "gotcha" associated with mutable default arguments
        # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
        if isinstance(output_ports, (str, tuple)):
            output_ports = list(output_ports)

        # IMPLEMENTATION NOTE: The following prevents the default from being updated by subsequent assignment
        #                     (in this case, to [OUTCOME, {NAME= MSE}]), but fails to expose default in IDE
        # output_ports = output_ports or [OUTCOME, MSE]

        super().__init__(monitor=input_ports,
                         function=function,
                         output_ports=output_ports, # prevent default from getting overwritten by later assign
                         params=params,
                         name=name,
                         prefs=prefs,

                         **kwargs
                         )

        # Require Projection to TARGET InputPort (already required for SAMPLE as primary InputPort)
        self.input_ports[1].parameters.require_projection_in_composition._set(True, Context())

    def _validate_params(self, request_set, target_set=None, context=None):
        """If sample and target values are specified, validate that they are compatible
        """

        if INPUT_PORTS in request_set and request_set[INPUT_PORTS] is not None:
            input_ports = request_set[INPUT_PORTS]

            # Validate that there are exactly two input_ports (for sample and target)
            num_input_ports = len(input_ports)
            if num_input_ports != 2:
                raise ComparatorMechanismError(f"{INPUT_PORTS} arg is specified for {self.__class__.__name__} "
                                               f"({len(input_ports)}), so it must have exactly 2 items, "
                                               f"one each for {SAMPLE} and {TARGET}.")

            # Validate that input_ports are specified as dicts
            if not all(isinstance(input_port,dict) for input_port in input_ports):
                raise ComparatorMechanismError("PROGRAM ERROR: all items in input_port args must be converted to dicts"
                                               " by calling Port._parse_port_spec() before calling super().__init__")

            # Validate length of variable for sample = target
            if VARIABLE in input_ports[0]:
                # input_ports arg specified in standard port specification dict format
                lengths = [len(input_port[VARIABLE]) if input_port[VARIABLE] is not None else 0
                           for input_port in input_ports]
            else:
                # input_ports arg specified in {<Port_Name>:<PORT SPECIFICATION DICT>} format
                lengths = [len(list(input_port_dict.values())[0][VARIABLE]) for input_port_dict in input_ports]

            if lengths[0] != lengths[1]:
                raise ComparatorMechanismError(f"Length of value specified for {SAMPLE} InputPort "
                                               f"of {self.__class__.__name__} ({lengths[0]}) must be "
                                               f"same as length of value specified for {TARGET} ({lengths[1]}).")

        elif SAMPLE in request_set and TARGET in request_set:

            # Resolve each spec to the value it would provide, so that the
            # compatibility check below compares values rather than the raw
            # specifications (which may be Ports, Mechanisms, etc.)
            sample = request_set[SAMPLE]
            if isinstance(sample, InputPort):
                sample_value = sample.value
            elif isinstance(sample, Mechanism):
                sample_value = sample.input_value[0]
            elif is_value_spec(sample):
                sample_value = sample
            else:
                sample_value = None

            target = request_set[TARGET]
            if isinstance(target, InputPort):
                target_value = target.value
            elif isinstance(target, Mechanism):
                target_value = target.input_value[0]
            elif is_value_spec(target):
                target_value = target
            else:
                target_value = None

            # FIX: previously compared the raw sample/target specs and left
            #      sample_value/target_value unused; compare the resolved values
            if sample_value is not None and target_value is not None:
                if not iscompatible(sample_value, target_value, **{kwCompatibilityLength: True,
                                                                   kwCompatibilityNumeric: True}):
                    raise ComparatorMechanismError(f"The length of the sample ({len(sample_value)}) "
                                                   f"must be the same as for the target ({len(target_value)}) "
                                                   f"for {self.__class__.__name__} {self.name}.")

        super()._validate_params(request_set=request_set,
                                 target_set=target_set,
                                 context=context)

    def _merge_legacy_constructor_args(self, sample, target, default_variable=None, input_ports=None):

        # USE sample and target TO CREATE AN InputPort specfication dictionary for each;
        # DO SAME FOR InputPorts argument, USE TO OVERWRITE ANY SPECIFICATIONS IN sample AND target DICTS
        # TRY tuple format AS WAY OF PROVIDED CONSOLIDATED variable AND OutputPort specifications

        sample_dict = _parse_port_spec(owner=self,
                                        port_type=InputPort,
                                        port_spec=sample,
                                        name=SAMPLE)

        target_dict = _parse_port_spec(owner=self,
                                        port_type=InputPort,
                                        port_spec=target,
                                        name=TARGET)

        # If either the default_variable arg or the input_ports arg is provided:
        #    - validate that there are exactly two items in default_variable or input_ports list
        #    - if there is an input_ports list, parse it and use it to update sample and target dicts
        if input_ports:
            input_ports = input_ports[INPUT_PORTS]
            if not isinstance(input_ports, list):
                raise ComparatorMechanismError(f"If an '{INPUT_PORTS}' argument is included in the constructor "
                                               f"for a {ComparatorMechanism.__name__} it must be a list with "
                                               f"two {InputPort.__name__} specifications.")

        input_ports = input_ports or default_variable

        if input_ports is not None:
            if len(input_ports)!=2:
                raise ComparatorMechanismError(f"If an \'input_ports\' arg is included in the constructor for a "
                                               f"{ComparatorMechanism.__name__}, it must be a list with exactly "
                                               f"two items (not {len(input_ports)}).")

            sample_input_port_dict = _parse_port_spec(owner=self,
                                                        port_type=InputPort,
                                                        port_spec=input_ports[0],
                                                        name=SAMPLE,
                                                        value=None)

            target_input_port_dict = _parse_port_spec(owner=self,
                                                        port_type=InputPort,
                                                        port_spec=input_ports[1],
                                                        name=TARGET,
                                                        value=None)

            sample_dict = recursive_update(sample_dict, sample_input_port_dict)
            target_dict = recursive_update(target_dict, target_input_port_dict)

        return [sample_dict, target_dict]
# Example #6
class PredictionErrorMechanism(ComparatorMechanism):
    """
    PredictionErrorMechanism(                                \
        sample,                                              \
        target,                                              \
        function=PredictionErrorDeltaFunction,               \
        output_ports=[OUTCOME],                             \
        params=None,                                         \
        name=None,                                           \
        prefs=None)

    Subclass of ComparatorMechanism that calculates the prediction error between the predicted reward and the target.
    See `ComparatorMechanism <ComparatorMechanism_Class_Reference>` for additional arguments and attributes.

    Arguments
    ---------

    sample : OutputPort, Mechanism_Base, dict, number, or str
        specifies the *SAMPLE* InputPort, that is evaluated by the `function <PredictionErrorMechanism.function>`.

    target : OutputPort, Mechanism_Base, dict, number, or str
        specifies the *TARGET* InputPort used by the function to evaluate `sample<PredictionErrorMechanism.sample>`.

    function : CombinationFunction, ObjectiveFunction, function, or method : default PredictionErrorDeltaFunction
        the function used to evaluate the SAMPLE and TARGET inputs.

    learning_rate : Number : default 0.3
        controls the weight of later timesteps compared to earlier ones;  higher rates weight later timesteps more
        heavily than previous ones.

    Attributes
    ----------

    sample : OutputPort, Mechanism_Base, dict, number, or str
        the *SAMPLE* `InputPort`, the `value <InputPort.value>` of which will be evaluated by the function.

    target : OutputPort, Mechanism_Base, dict, number, or str
        the *TARGET* `InputPort`, the `value <InputPort.value>` of which will be used to evaluate `sample
        <PredictionErrorMechanism.sample>`.

    function : CombinationFunction, ObjectiveFunction, Function, or method : default PredictionErrorDeltaFunction
        the function used to evaluate the sample and target inputs.

    output_ports : str, Iterable : default OUTCOME
        by default, contains only the *OUTCOME* (`primary <OutputPort_Primary>`) `OutputPort` of the
        PredictionErrorMechanism.

    learning_rate : Number : default 0.3
        controls the weight of later timesteps compared to earlier ones; higher rates weight later timesteps more
        heavily than previous ones.

    """
    componentType = PREDICTION_ERROR_MECHANISM

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    classPreferences = {
        PREFERENCE_SET_NAME: 'PredictionErrorMechanismCustomClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    class Parameters(ComparatorMechanism.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <Mechanism_Base.variable>`

                    :default value: None
                    :type:
                    :read only: True

                function
                    see `function <PredictionErrorMechanism.function>`

                    :default value: `PredictionErrorDeltaFunction`
                    :type: `Function`

                learning_rate
                    see `learning_rate <PredictionErrorMechanism.learning_rate>`

                    :default value: 0.3
                    :type: float

        """

        variable = Parameter(None, read_only=True, pnl_internal=True, constructor_argument='default_variable')
        learning_rate = Parameter(0.3, modulable=True)
        function = PredictionErrorDeltaFunction
        sample = None
        target = None

    @tc.typecheck
    def __init__(self,
                 sample: tc.optional(tc.any(OutputPort, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 target: tc.optional(tc.any(OutputPort, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 function=PredictionErrorDeltaFunction(),
                 output_ports: tc.optional(tc.any(str, Iterable)) = None,
                 learning_rate: is_numeric = 0.3,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs
                 ):

        # The sample and target specs double as the two InputPort specs
        input_ports = [sample, target]
        # Fold constructor args into the params dict expected by the superclass
        params = self._assign_args_to_param_dicts(sample=sample,
                                                  target=target,
                                                  function=function,
                                                  learning_rate=learning_rate,
                                                  params=params)

        super().__init__(sample=sample,
                         target=target,
                         input_ports=input_ports,
                         function=function,
                         output_ports=output_ports,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs
                         )

    def _parse_function_variable(self, variable, context=None):
        # Build the function's variable directly from the SAMPLE and TARGET
        # InputPort values for the given context (ignores `variable` arg).
        # TODO: update to take sample/reward from variable
        # sample = x(t) in Montague on first run, V(t) on subsequent runs
        sample = self.input_ports[SAMPLE].parameters.value._get(context)
        reward = self.input_ports[TARGET].parameters.value._get(context)

        return [sample, reward]

    def _execute(self, variable=None, context=None, runtime_params=None):
        # Compute delta via the superclass, then shift it one timestep:
        # drop the first element and pad with a trailing 0 (presumably to
        # align the temporal-difference error with the next timestep --
        # TODO confirm against PredictionErrorDeltaFunction's convention).
        delta = super()._execute(variable=variable, context=context, runtime_params=runtime_params)
        delta = delta[0][1:]
        delta = np.append(delta, 0)
        return delta
# Example #7
class LeabraFunction(Function_Base):
    """
    LeabraFunction(             \
        default_variable=None,  \
        network=None,           \
        params=None,            \
        owner=None,             \
        prefs=None)

    .. _LeabraFunction:

    LeabraFunction is a custom function that lives inside the LeabraMechanism. As a function, it transforms the
    variable by providing it as input to the leabra network inside the LeabraFunction.

    Arguments
    ---------

    default_variable : number or np.array : default np.zeros() (array of zeros)
        specifies a template for the input to the leabra network.

    network : leabra.Network
        specifies the leabra network to be used.

    params : Dict[param keyword: param value] : default None
        a `parameter dictionary <ParameterState_Specification>` that specifies the parameters for the
        function.  Values specified for parameters in the dictionary override any assigned to those parameters in
        arguments of the constructor.

    owner : Component
        `component <Component>` to which to assign the Function.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the LeabraMechanism; see `prefs <LeabraMechanism.prefs>` for details.


    Attributes
    ----------

    variable : number or np.array
        contains value to be transformed.

    network : leabra.Network
        the leabra network that is being used

    owner : Mechanism
        `component <Component>` to which the Function has been assigned.

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the LeabraMechanism; if it is not specified in the **prefs** argument of the
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet
        <LINK>` for details).

    """

    componentType = LEABRA_FUNCTION_TYPE
    componentName = LEABRA_FUNCTION

    multiplicative_param = NotImplemented
    additive_param = NotImplemented  # very hacky

    classPreferences = {
        kwPreferenceSetName: 'LeabraFunctionClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    paramClassDefaults = Function_Base.paramClassDefaults.copy()

    class Parameters(Function_Base.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <LeabraFunction.variable>`

                    :default value: numpy.array([[0], [0]])
                    :type: numpy.ndarray
                    :read only: True

                network
                    see `network <LeabraFunction.network>`

                    :default value: None
                    :type:

        """
        variable = Parameter(np.array([[0], [0]]), read_only=True)
        network = None

    def __init__(self,
                 default_variable=None,
                 network=None,
                 params=None,
                 owner=None,
                 prefs=None):

        if not leabra_available:
            raise LeabraError(
                'leabra python module is not installed. Please install it from '
                'https://github.com/benureau/leabra')

        if network is None:
            raise LeabraError(
                'network was None. Cannot create function for Leabra Mechanism if network is not specified.'
            )

        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(network=network,
                                                  params=params)

        # Default the variable to [input vector, output vector] sized to the
        # network's first and last layers.  Use the local `network` (validated
        # non-None above) rather than relying on self.network having been set
        # as a side effect of _assign_args_to_param_dicts.
        if default_variable is None:
            input_size = len(network.layers[0].units)
            output_size = len(network.layers[-1].units)
            default_variable = [np.zeros(input_size), np.zeros(output_size)]

        super().__init__(default_variable=default_variable,
                         params=params,
                         owner=owner,
                         prefs=prefs,
                         context=ContextFlags.CONSTRUCTOR)

    def _validate_variable(self, variable, context=None):
        if not isinstance(variable, (list, np.ndarray, numbers.Number)):
            raise LeabraError(
                "Input Error: the input variable ({}) was of type {}, but instead should be a list, "
                "numpy array, or number.".format(variable, type(variable)))

        input_size = len(self.network.layers[0].units)
        output_size = len(self.network.layers[-1].units)
        # When not training, only the input pattern is validated; when
        # training, both the input and the target pattern must match the
        # network's input and output layer sizes.
        if (not hasattr(self, "owner")) or (not hasattr(
                self.owner,
                "training_flag")) or self.owner.training_flag is False:
            if len(convert_to_2d_input(variable)[0]) != input_size:
                # convert_to_2d_input(variable)[0] is just in case variable is a 2D array rather than a vector
                raise LeabraError(
                    "Input Error: the input was {}, which was of an incompatible length with the "
                    "input_size, which should be {}.".format(
                        convert_to_2d_input(variable)[0], input_size))
        else:
            if len(convert_to_2d_input(variable)[0]) != input_size or len(
                    convert_to_2d_input(variable)[1]) != output_size:
                raise LeabraError(
                    "Input Error: the input variable was {}, which was of an incompatible length with "
                    "the input_size or output_size, which should be {} and {} respectively."
                    .format(variable, input_size, output_size))
        return variable

    def _validate_params(self, request_set, target_set=None, context=None):
        if not isinstance(request_set[NETWORK], leabra.Network):
            raise LeabraError(
                "Error: the network given ({}) was of type {}, but instead must be a leabra Network."
                .format(request_set[NETWORK], type(request_set[NETWORK])))
        super()._validate_params(request_set, target_set, context)

    def function(self,
                 variable=None,
                 execution_id=None,
                 params=None,
                 context=None):
        variable = self._check_args(variable=variable,
                                    execution_id=execution_id,
                                    params=params,
                                    context=context)

        network = self.parameters.network.get(execution_id)
        # HACK: otherwise the INITIALIZING function executions impact the state of the leabra network
        if self.parameters.context.get(
                execution_id
        ).initialization_status == ContextFlags.INITIALIZING:
            output_size = len(network.layers[-1].units)
            return np.zeros(output_size)

        try:
            training_flag = self.owner.parameters.training_flag.get(
                execution_id)
        except AttributeError:
            training_flag = False

        # None or False
        if not training_flag:
            if isinstance(variable[0], (list, np.ndarray)):
                variable = variable[0]
            return run_leabra_network(network, input_pattern=variable)

        else:
            # variable = convert_to_2d_input(variable)  # FIX: buggy, doesn't handle lists well
            if len(variable) != 2:
                raise LeabraError(
                    "Input Error: the input given ({}) for training was not the right format: the input "
                    "should be a 2D array containing two vectors, corresponding to the input and the "
                    "training target.".format(variable))
            if len(variable[0]) != len(network.layers[0].units) or len(
                    variable[1]) != len(network.layers[-1].units):
                # FIX: previously formatted the layer object itself into the
                # "should be length {}" slot; report the expected length instead
                raise LeabraError(
                    "Input Error: the input given ({}) was not the right format: it should be a 2D array "
                    "containing two vectors, corresponding to the input (which should be length {}) and "
                    "the training target (which should be length {})".format(
                        variable, len(network.layers[0].units),
                        len(network.layers[-1].units)))
            return train_leabra_network(network,
                                        input_pattern=variable[0],
                                        output_pattern=variable[1])
# Example #8
class LeabraMechanism(ProcessingMechanism_Base):
    """
    LeabraMechanism(                \
    leabra_net=None,                \
    input_size=1,                   \
    output_size=1,                  \
    hidden_layers=0,                \
    hidden_sizes=None,              \
    training_flag=False,            \
    params=None,                    \
    name=None,                      \
    prefs=None)

    Subclass of `ProcessingMechanism` that is a wrapper for a Leabra network in PsyNeuLink.

    Arguments
    ---------

    leabra_net : Optional[leabra.Network]
        a network object from the leabra package. If specified, the LeabraMechanism's network becomes **leabra_net**,
        and the other arguments that specify the network are ignored (**input_size**, **output_size**,
        **hidden_layers**, **hidden_sizes**).

    input_size : int : default 1
        an integer specifying how many units are in (the size of) the first layer (input) of the leabra network.

    output_size : int : default 1
        an integer specifying how many units are in (the size of) the final layer (output) of the leabra network.

    hidden_layers : int : default 0
        an integer specifying how many hidden layers are in the leabra network.

    hidden_sizes : int or List[int] : default input_size
        if specified, this should be a list of integers, specifying the size of each hidden layer. If **hidden_sizes**
        is a list, the number of integers in **hidden_sizes** should be equal to the number of hidden layers. If not
        specified, hidden layers will default to the same size as the input layer. If hidden_sizes is a single integer,
        then all hidden layers are of that size.

    training_flag : boolean : default None
        a boolean specifying whether the leabra network should be learning. If True, the leabra network will adjust
        its weights using the "leabra" algorithm, based on the training pattern (which is read from its second output
        state). The `training_flag` attribute can be changed after initialization, causing the leabra network to
        start/stop learning. If None, `training_flag` will default to False if **leabra_net** argument is not provided.
        If **leabra_net** argument is provided and `training_flag` is None, then the existing learning rules of the
        **leabra_net** will be preserved.

    quarter_size : int : default 50
        an integer specifying how many times the Leabra network cycles each time it is run. Lower values of
        quarter_size result in shorter execution times, though very low values may cause slight fluctuations in output.
        Lower values of quarter_size also effectively reduce the magnitude of learning weight changes during
        a given trial.

    params : Dict[param keyword: param value] : default None
        a `parameter dictionary <ParameterState_Specification>` that can be used to specify the parameters for
        the mechanism, its function, and/or a custom function and its parameters.  Values specified for parameters in
        the dictionary override any assigned to those parameters in arguments of the constructor.

    name : str : default KWTA-<index>
        a string used for the name of the mechanism.
        If is not specified, a default is assigned by `MechanismRegistry`
        (see :doc:`Registry <LINK>` for conventions used in naming, including for default and duplicate names).

    prefs : Optional[PreferenceSet or specification dict : Mechanism.classPreferences]
        the `PreferenceSet` for mechanism.
        If it is not specified, a default is assigned using `classPreferences` defined in __init__.py
        (see :doc:`PreferenceSet <LINK>` for details).

    context : str : default componentType+INITIALIZING
        string used for contextualization of instantiation, hierarchical calls, executions, etc.

    Attributes
    ----------

    variable : value
        the input to this Mechanism's `function <LeabraMechanism.function>`.

    function : LeabraFunction
        the function that wraps and executes the leabra mechanism

    value : 2d np.array [array(float64)]
        result of executing `function <LeabraMechanism.function>`.

    input_size : int : default 1
        an integer specifying how many units are in (the size of) the first layer (input) of the leabra network.

    output_size : int : default 1
        an integer specifying how many units are in (the size of) the final layer (output) of the leabra network.

    hidden_layers : int : default 0
        an integer specifying how many hidden layers are in the leabra network.

    hidden_sizes : int or List[int] : default input_size
        an integer or list of integers, specifying the size of each hidden layer.

    training_flag : boolean
        a boolean specifying whether the leabra network should be learning. If True, the leabra network will adjust
        its weights using the "leabra" algorithm, based on the training pattern (which is read from its second output
        state). The `training_flag` attribute can be changed after initialization, causing the leabra network to
        start/stop learning.

    quarter_size : int : default 50
        an integer specifying how many times the Leabra network cycles each time it is run. Lower values of
        quarter_size result in shorter execution times, though very low values may cause slight fluctuations in output.
        Lower values of quarter_size also effectively reduce the magnitude of learning weight changes during
        a given trial.

    network : leabra.Network
        the leabra.Network object which is executed by the LeabraMechanism. For more info about leabra Networks,
        please see the `leabra package <https://github.com/benureau/leabra>` on Github.

    output_states : *ContentAddressableList[OutputState]* : default [`RESULT <TRANSFER_MECHANISM_RESULT>`]
        list of Mechanism's `OutputStates <OutputStates>`.  By default there is a single OutputState,
        `RESULT <TRANSFER_MECHANISM_RESULT>`, that contains the result of a call to the Mechanism's
        `function <LeabraMechanism.function>`;  additional `standard <TransferMechanism_Standard_OutputStates>`
        and/or custom OutputStates may be included, based on the specifications made in the **output_states** argument
        of the Mechanism's constructor.

    output_values : List[array(float64)]
        each item is the `value <OutputState.value>` of the corresponding OutputState in `output_states
        <LeabraMechanism.output_states>`.  The default is a single item containing the result of the
        TransferMechanism's `function <LeabraMechanism.function>`;  additional
        ones may be included, based on the specifications made in the
        **output_states** argument of the Mechanism's constructor (see `TransferMechanism Standard OutputStates
        <TransferMechanism_Standard_OutputStates>`).

    name : str : default LeabraMechanism-<index>
        the name of the Mechanism.
        Specified in the **name** argument of the constructor for the Projection;
        if not specified, a default is assigned by `MechanismRegistry`
        (see :doc:`Registry <LINK>` for conventions used in naming, including for default and duplicate names).

    prefs : PreferenceSet or specification dict : Mechanism.classPreferences
        the `PreferenceSet` for Mechanism.
        Specified in the **prefs** argument of the constructor for the Mechanism;
        if it is not specified, a default is assigned using `classPreferences` defined in ``__init__.py``
        (see :doc:`PreferenceSet <LINK>` for details).

    Returns
    -------
    instance of LeabraMechanism : LeabraMechanism
    """

    componentType = LEABRA_MECHANISM

    is_self_learner = True  # CW 11/27/17: a flag; "True" if the mechanism self-learns. Declared in ProcessingMechanism

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName: 'TransferCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    # LeabraMechanism parameter and control signal assignments):
    paramClassDefaults = Mechanism_Base.paramClassDefaults.copy()
    paramClassDefaults.update({
        FUNCTION: LeabraFunction,
        INPUT_STATES: input_state_names,
        OUTPUT_STATES: output_state_name
    })

    standard_output_states = standard_output_states.copy()

    class Parameters(ProcessingMechanism_Base.Parameters):
        """
            Attributes
            ----------

                hidden_layers
                    see `hidden_layers <LeabraMechanism.hidden_layers>`

                    :default value: 0
                    :type: int

                hidden_sizes
                    see `hidden_sizes <LeabraMechanism.hidden_sizes>`

                    :default value: None
                    :type:

                input_size
                    see `input_size <LeabraMechanism.input_size>`

                    :default value: 1
                    :type: int

                network
                    see `network <LeabraMechanism.network>`

                    :default value: None
                    :type:

                output_size
                    see `output_size <LeabraMechanism.output_size>`

                    :default value: 1
                    :type: int

                quarter_size
                    see `quarter_size <LeabraMechanism.quarter_size>`

                    :default value: 50
                    :type: int

                training_flag
                    see `training_flag <LeabraMechanism.training_flag>`

                    :default value: None
                    :type:

        """
        input_size = 1
        output_size = 1
        hidden_layers = 0
        hidden_sizes = None
        quarter_size = 50

        network = Parameter(None,
                            getter=_network_getter,
                            setter=_network_setter)
        training_flag = Parameter(None, setter=_training_flag_setter)

    def __init__(self,
                 leabra_net=None,
                 input_size=1,
                 output_size=1,
                 hidden_layers=0,
                 hidden_sizes=None,
                 training_flag=None,
                 quarter_size=50,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):
        if not leabra_available:
            raise LeabraError(
                'leabra python module is not installed. Please install it from '
                'https://github.com/benureau/leabra')

        if leabra_net is not None:
            # A pre-built network was supplied: derive all size/shape
            # arguments from it (they override whatever the caller passed).
            leabra_network = leabra_net
            input_size = len(leabra_network.layers[0].units)
            output_size = len(leabra_network.layers[-1].units)
            # layers[0] is the input layer and layers[-1] the output layer,
            # so the hidden layers are layers[1:-1].
            hidden_layers = len(leabra_network.layers) - 2
            # BUGFIX: slice was [1:-2], which dropped the size of the last
            # hidden layer; [1:-1] yields exactly `hidden_layers` entries.
            hidden_sizes = list(
                map(lambda x: len(x.units), leabra_network.layers))[1:-1]
            quarter_size = leabra_network.spec.quarter_size
            training_flag = infer_training_flag_from_network(leabra_network)
        else:
            # Build a fresh network from the size arguments; hidden layers
            # default to the input layer's size, and learning defaults off.
            if hidden_sizes is None:
                hidden_sizes = input_size
            if training_flag is None:
                training_flag = False
            leabra_network = build_leabra_network(input_size, output_size,
                                                  hidden_layers, hidden_sizes,
                                                  training_flag, quarter_size)

        function = LeabraFunction(network=leabra_network)

        # Promote the class-level standard_output_states list to a
        # StandardOutputStates object the first time an instance is created.
        if not isinstance(self.standard_output_states, StandardOutputStates):
            self.standard_output_states = StandardOutputStates(
                self, self.standard_output_states, indices=PRIMARY)

        params = self._assign_args_to_param_dicts(function=function,
                                                  input_size=input_size,
                                                  output_size=output_size,
                                                  hidden_layers=hidden_layers,
                                                  hidden_sizes=hidden_sizes,
                                                  training_flag=training_flag,
                                                  quarter_size=quarter_size,
                                                  params=params)

        # Two input states: one sized for the input pattern, one for the
        # training target (read from the network's output layer size).
        super().__init__(size=[input_size, output_size],
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=ContextFlags.CONSTRUCTOR)

    def _execute(self,
                 variable=None,
                 execution_id=None,
                 runtime_params=None,
                 time_scale=TimeScale.TRIAL,  # retained for interface compatibility; not forwarded
                 context=None):
        """Execute the wrapped leabra network, honoring a runtime
        `training_flag` override if one is supplied in **runtime_params**.
        """
        if runtime_params:
            # Apply (and consume) a per-execution training_flag override so it
            # is not passed through to the underlying function as a runtime
            # parameter.
            if "training_flag" in runtime_params:
                self.parameters.training_flag.set(
                    runtime_params["training_flag"], execution_id)
                del runtime_params["training_flag"]

        return super()._execute(variable=variable,
                                execution_id=execution_id,
                                runtime_params=runtime_params,
                                context=context)
Пример #9
0
class CompositionInterfaceMechanism(ProcessingMechanism_Base):
    """
    CompositionInterfaceMechanism(                            \
    default_variable=None,                               \
    size=None,                                              \
    function=Identity() \
    params=None,                                            \
    name=None,                                              \
    prefs=None)

    Implements the CompositionInterfaceMechanism subclass of Mechanism.

    A CompositionInterfaceMechanism relays values across the boundary of a
    `Composition`: by default its `function <CompositionInterfaceMechanism.function>`
    is `Identity`, so each input is passed through unchanged to the
    corresponding OutputState.

    Arguments
    ---------

    default_variable : number, list or np.ndarray
        the input to the Mechanism to use if none is provided in a call to its
        `execute <Mechanism_Base.execute>` or `run <Mechanism_Base.run>` methods;
        also serves as a template for the length of
        `variable <CompositionInterfaceMechanism.variable>` and the
        `primary outputState <OutputState_Primary>` of the Mechanism.

    size : int, list or np.ndarray of ints
        specifies default_variable as array(s) of zeros if **default_variable**
        is not passed as an argument; if **default_variable** is specified, it
        takes precedence over the specification of **size**.

    function : InterfaceFunction : default Identity
        specifies the function used to transform the variable before assigning
        it to the Mechanism's OutputState(s).

    params : Optional[Dict[param keyword, param value]]
        a `parameter dictionary <ParameterState_Specifying_Parameters>` that
        can be used to specify parameters for the `Mechanism <Mechanism>`, for
        its `function <CompositionInterfaceMechanism.function>`, and/or a
        custom function and its parameters.  Values specified here override
        any assigned to those parameters in arguments of the constructor.

    name : str : default CompositionInterfaceMechanism-<index>
        a string used for the name of the Mechanism.  If not specified, a
        default is assigned by `MechanismRegistry` (see :doc:`Registry <LINK>`
        for naming conventions, including defaults and duplicates).

    prefs : Optional[PreferenceSet or specification dict : Mechanism.classPreferences]
        the `PreferenceSet` for the Mechanism.  If not specified, a default is
        assigned using `classPreferences` defined in __init__.py
        (see :doc:`PreferenceSet <LINK>` for details).

    Attributes
    ----------
    variable : value: default
        the input to Mechanism's ``function``.

    name : str : default CompositionInterfaceMechanism-<index>
        the name of the Mechanism (from the **name** constructor argument, or
        a `MechanismRegistry` default).

    prefs : Optional[PreferenceSet or specification dict : Mechanism.classPreferences]
        the `PreferenceSet` for the Mechanism (from the **prefs** constructor
        argument, or a `classPreferences` default).

    """

    componentType = COMPOSITION_INTERFACE_MECHANISM

    classPreferenceLevel = PreferenceLevel.TYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName:
        'CompositionInterfaceMechanismCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    paramClassDefaults = Mechanism_Base.paramClassDefaults.copy()
    paramClassDefaults.update({})
    paramNames = paramClassDefaults.keys()

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(
                     tc.any(Iterable, Mechanism, OutputState,
                            InputState)) = None,
                 function=Identity(),
                 composition=None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):

        # With neither a variable template nor a size, fall back to the
        # class default variable.
        if default_variable is None and size is None:
            default_variable = self.class_defaults.variable

        # Track which Composition this mechanism serves as an interface for,
        # and whether it has been wired into one yet.
        self.composition = composition
        self.connected_to_composition = False

        # Fold constructor arguments into the params/functionParams dicts.
        params = self._assign_args_to_param_dicts(function=function,
                                                  input_states=input_states,
                                                  params=params)

        super().__init__(default_variable=default_variable,
                         size=size,
                         input_states=input_states,
                         function=function,
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=ContextFlags.CONSTRUCTOR)
class ComparatorMechanism(ObjectiveMechanism):
    """
    ComparatorMechanism(                                \
        sample,                                         \
        target,                                         \
        input_states=[SAMPLE,TARGET]                    \
        function=LinearCombination(weights=[[-1],[1]],  \
        output_states=OUTCOME                           \
        params=None,                                    \
        name=None,                                      \
        prefs=None)

    Subclass of `ObjectiveMechanism` that compares the values of two `OutputStates <OutputState>`.

    COMMENT:
        Description:
            ComparatorMechanism is a subtype of the ObjectiveMechanism Subtype of the ProcssingMechanism Type
            of the Mechanism Category of the Component class.
            By default, it's function uses the LinearCombination Function to compare two input variables.
            COMPARISON_OPERATION (functionParams) determines whether the comparison is subtractive or divisive
            The function returns an array with the Hadamard (element-wise) differece/quotient of target vs. sample,
                as well as the mean, sum, sum of squares, and mean sum of squares of the comparison array

        Class attributes:
            + componentType (str): ComparatorMechanism
            + classPreference (PreferenceSet): Comparator_PreferenceSet, instantiated in __init__()
            + classPreferenceLevel (PreferenceLevel): PreferenceLevel.SUBTYPE
            + class_defaults.variable (value):  Comparator_DEFAULT_STARTING_POINT // QUESTION: What to change here
            + paramClassDefaults (dict): {FUNCTION_PARAMS:{COMPARISON_OPERATION: SUBTRACTION}}

        Class methods:
            None

        MechanismRegistry:
            All instances of ComparatorMechanism are registered in MechanismRegistry, which maintains an
              entry for the subclass, a count for all instances of it, and a dictionary of those instances
    COMMENT

    Arguments
    ---------

    sample : OutputState, Mechanism, value, or string
        specifies the value to compare with the `target` by the `function <ComparatorMechanism.function>`.

    target :  OutputState, Mechanism, value, or string
        specifies the value with which the `sample` is compared by the `function <ComparatorMechanism.function>`.

    input_states :  List[InputState, value, str or dict] or Dict[] : default [SAMPLE, TARGET]
        specifies the names and/or formats to use for the values of the sample and target InputStates;
        by default they are named *SAMPLE* and *TARGET*, and their formats are match the value of the OutputStates
        specified in the **sample** and **target** arguments, respectively (see `ComparatorMechanism_Structure`
        for additional details).

    function :  Function, function or method : default Distance(metric=DIFFERENCE)
        specifies the `function <Comparator.function>` used to compare the `sample` with the `target`.

    output_states :  List[OutputState, value, str or dict] or Dict[] : default [OUTCOME]
        specifies the OutputStates for the Mechanism;

    params :  Optional[Dict[param keyword: param value]]
        a `parameter dictionary <ParameterState_Specification>` that can be used to specify the parameters for
        the Mechanism, its function, and/or a custom function and its parameters. Values specified for parameters in
        the dictionary override any assigned to those parameters in arguments of the
        constructor.

    name : str : default see `name <ComparatorMechanism.name>`
        specifies the name of the ComparatorMechanism.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the ComparatorMechanism; see `prefs <ComparatorMechanism.prefs>` for details.


    Attributes
    ----------

    COMMENT:
    default_variable : Optional[List[array] or 2d np.array]
    COMMENT

    sample : OutputState
        determines the value to compare with the `target` by the `function <ComparatorMechanism.function>`.

    target : OutputState
        determines the value with which `sample` is compared by the `function <ComparatorMechanism.function>`.

    input_states : ContentAddressableList[InputState, InputState]
        contains the two InputStates named, by default, *SAMPLE* and *TARGET*, each of which receives a
        `MappingProjection` from the OutputStates referenced by the `sample` and `target` attributes
        (see `ComparatorMechanism_Structure` for additional details).

    function : CombinationFunction, function or method
        used to compare the `sample` with the `target`.  It can be any PsyNeuLink `CombinationFunction`,
        or a python function that takes a 2d array with two items and returns a 1d array of the same length
        as the two input items.

    value : 1d np.array
        the result of the comparison carried out by the `function <ComparatorMechanism.function>`.

    output_state : OutputState
        contains the `primary <OutputState_Primary>` OutputState of the ComparatorMechanism; the default is
        its *OUTCOME* OutputState, the value of which is equal to the `value <ComparatorMechanism.value>`
        attribute of the ComparatorMechanism.

    output_states : ContentAddressableList[OutputState]
        contains, by default, only the *OUTCOME* (primary) OutputState of the ComparatorMechanism.

    output_values : 2d np.array
        contains one item that is the value of the *OUTCOME* OutputState.

    name : str
        the name of the ComparatorMechanism; if it is not specified in the **name** argument of the constructor, a
        default is assigned by MechanismRegistry (see `Naming` for conventions used for default and duplicate names).

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the ComparatorMechanism; if it is not specified in the **prefs** argument of the
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet
        <LINK>` for details).


    """
    componentType = COMPARATOR_MECHANISM

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName: 'ComparatorCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    class Parameters(ObjectiveMechanism.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <ComparatorMechanism.variable>`

                    :default value: numpy.array([[0], [0]])
                    :type: numpy.ndarray
                    :read only: True

                function
                    see `function <ComparatorMechanism.function>`

                    :default value: `LinearCombination`(offset=0.0, operation=sum, scale=1.0, weights=numpy.array([[-1], [ 1]]))
                    :type: `Function`

                sample
                    see `sample <ComparatorMechanism.sample>`

                    :default value: None
                    :type:

                target
                    see `target <ComparatorMechanism.target>`

                    :default value: None
                    :type:

        """
        # By default, ComparatorMechanism compares two 1D np.array input_states
        variable = Parameter(np.array([[0], [0]]), read_only=True)
        # weights [[-1], [1]] make the default output target - sample
        function = Parameter(LinearCombination(weights=[[-1], [1]]),
                             stateful=False,
                             loggable=False)
        sample = None
        target = None

    # ComparatorMechanism parameter and control signal assignments):
    paramClassDefaults = Mechanism_Base.paramClassDefaults.copy()

    standard_output_states = ObjectiveMechanism.standard_output_states.copy()

    # Add SSE (sum of squared errors) and MSE (mean squared error) to the
    # standard OutputStates inherited from ObjectiveMechanism.
    standard_output_states.extend([{
        NAME: SSE,
        FUNCTION: lambda x: np.sum(x * x)
    }, {
        NAME:
        MSE,
        FUNCTION:
        lambda x: np.sum(x * x) / safe_len(x)
    }])

    @tc.typecheck
    def __init__(
        self,
        default_variable=None,
        sample: tc.optional(
            tc.any(OutputState, Mechanism_Base, dict, is_numeric, str)) = None,
        target: tc.optional(
            tc.any(OutputState, Mechanism_Base, dict, is_numeric, str)) = None,
        function=LinearCombination(weights=[[-1], [1]]),
        output_states: tc.optional(tc.any(str, Iterable)) = (OUTCOME, ),
        params=None,
        name=None,
        prefs: is_pref_set = None,
        **
        input_states  # IMPLEMENTATION NOTE: this is for backward compatibility
    ):
        """Construct a ComparatorMechanism that monitors **sample** and
        **target** (merged with any legacy **default_variable** /
        **input_states** specifications) and compares them with **function**.
        """

        input_states = self._merge_legacy_constructor_args(
            sample, target, default_variable, input_states)

        # Default output_states is specified in constructor as a tuple rather than a list
        # to avoid "gotcha" associated with mutable default arguments
        # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
        if isinstance(output_states, (str, tuple)):
            output_states = list(output_states)

        # IMPLEMENTATION NOTE: The following prevents the default from being updated by subsequent assignment
        #                     (in this case, to [OUTCOME, {NAME= MSE}]), but fails to expose default in IDE
        # output_states = output_states or [OUTCOME, MSE]

        # Create a StandardOutputStates object from the list of stand_output_states specified for the class
        if not isinstance(self.standard_output_states, StandardOutputStates):
            self.standard_output_states = StandardOutputStates(
                self, self.standard_output_states, indices=PRIMARY)

        super().__init__(  # monitor=[sample, target],
            monitor=input_states,
            function=function,
            output_states=output_states.copy(
            ),  # prevent default from getting overwritten by later assign
            params=params,
            name=name,
            prefs=prefs,
            context=ContextFlags.CONSTRUCTOR)

    def _validate_params(self, request_set, target_set=None, context=None):
        """If sample and target values are specified, validate that they are compatible
        """

        # Branch 1: input_states were specified (already parsed into dicts by
        # _merge_legacy_constructor_args / State._parse_state_spec).
        if INPUT_STATES in request_set and request_set[
                INPUT_STATES] is not None:
            input_states = request_set[INPUT_STATES]

            # Validate that there are exactly two input_states (for sample and target)
            num_input_states = len(input_states)
            if num_input_states != 2:
                raise ComparatorMechanismError(
                    "{} arg is specified for {} ({}), so it must have exactly 2 items, "
                    "one each for {} and {}".format(INPUT_STATES,
                                                    self.__class__.__name__,
                                                    len(input_states), SAMPLE,
                                                    TARGET))

            # Validate that input_states are specified as dicts
            if not all(
                    isinstance(input_state, dict)
                    for input_state in input_states):
                raise ComparatorMechanismError(
                    "PROGRAM ERROR: all items in input_state args must be converted to dicts"
                    " by calling State._parse_state_spec() before calling super().__init__"
                )

            # Validate length of variable for sample = target
            if VARIABLE in input_states[0]:
                # input_states arg specified in standard state specification dict format
                lengths = [
                    len(input_state[VARIABLE]) for input_state in input_states
                ]
            else:
                # input_states arg specified in {<STATE_NAME>:<STATE SPECIFICATION DICT>} format
                lengths = [
                    len(list(input_state_dict.values())[0][VARIABLE])
                    for input_state_dict in input_states
                ]

            if lengths[0] != lengths[1]:
                raise ComparatorMechanismError(
                    "Length of value specified for {} InputState of {} ({}) must be "
                    "same as length of value specified for {} ({})".format(
                        SAMPLE, self.__class__.__name__, lengths[0], TARGET,
                        lengths[1]))

        # Branch 2: raw SAMPLE/TARGET specs were supplied instead.
        elif SAMPLE in request_set and TARGET in request_set:

            # Resolve the sample spec to a comparable value where possible.
            sample = request_set[SAMPLE]
            if isinstance(sample, InputState):
                sample_value = sample.value
            elif isinstance(sample, Mechanism):
                sample_value = sample.input_value[0]
            elif is_value_spec(sample):
                sample_value = sample
            else:
                sample_value = None

            # Resolve the target spec the same way.
            target = request_set[TARGET]
            if isinstance(target, InputState):
                target_value = target.value
            elif isinstance(target, Mechanism):
                target_value = target.input_value[0]
            elif is_value_spec(target):
                target_value = target
            else:
                target_value = None

            # NOTE(review): sample_value / target_value are computed above but
            # never used -- the compatibility check below operates on the raw
            # sample / target specs.  Presumably it should compare (and take
            # len() of) the resolved values; confirm before changing, since
            # behavior for State/Mechanism specs would differ.
            if sample is not None and target is not None:
                if not iscompatible(
                        sample, target, **{
                            kwCompatibilityLength: True,
                            kwCompatibilityNumeric: True
                        }):
                    raise ComparatorMechanismError(
                        "The length of the sample ({}) must be the same as for the target ({})"
                        "for {} {}".format(len(sample), len(target),
                                           self.__class__.__name__, self.name))

        super()._validate_params(request_set=request_set,
                                 target_set=target_set,
                                 context=context)

    def _merge_legacy_constructor_args(self,
                                       sample,
                                       target,
                                       default_variable=None,
                                       input_states=None):
        """Combine **sample**/**target** specs with any legacy
        **default_variable**/**input_states** args and return a two-item
        list of InputState specification dicts: [sample_dict, target_dict].
        """

        # USE sample and target TO CREATE AN InputState specfication dictionary for each;
        # DO SAME FOR InputStates argument, USE TO OVERWRITE ANY SPECIFICATIONS IN sample AND target DICTS
        # TRY tuple format AS WAY OF PROVIDED CONSOLIDATED variable AND OutputState specifications

        sample_dict = _parse_state_spec(owner=self,
                                        state_type=InputState,
                                        state_spec=sample,
                                        name=SAMPLE)

        target_dict = _parse_state_spec(owner=self,
                                        state_type=InputState,
                                        state_spec=target,
                                        name=TARGET)

        # If either the default_variable arg or the input_states arg is provided:
        #    - validate that there are exactly two items in default_variable or input_states list
        #    - if there is an input_states list, parse it and use it to update sample and target dicts
        if input_states:
            # input_states arrives as the **kwargs dict from __init__;
            # unwrap the actual list stored under the INPUT_STATES key.
            input_states = input_states[INPUT_STATES]
            # print("type input_states = {}".format(type(input_states)))
            if not isinstance(input_states, list):
                raise ComparatorMechanismError(
                    "If an \'{}\' argument is included in the constructor for a {} "
                    "it must be a list with two {} specifications.".format(
                        INPUT_STATES, ComparatorMechanism.__name__,
                        InputState.__name__))

        # default_variable serves as a fallback source of the two specs.
        input_states = input_states or default_variable

        if input_states is not None:
            if len(input_states) != 2:
                raise ComparatorMechanismError(
                    "If an \'input_states\' arg is "
                    "included in the constructor for "
                    "a {}, it must be a list with "
                    "exactly two items (not {})".format(
                        ComparatorMechanism.__name__, len(input_states)))

            sample_input_state_dict = _parse_state_spec(
                owner=self,
                state_type=InputState,
                state_spec=input_states[0],
                name=SAMPLE,
                value=None)

            target_input_state_dict = _parse_state_spec(
                owner=self,
                state_type=InputState,
                state_spec=input_states[1],
                name=TARGET,
                value=None)

            # Explicit input_states specs override the sample/target dicts.
            sample_dict = recursive_update(sample_dict,
                                           sample_input_state_dict)
            target_dict = recursive_update(target_dict,
                                           target_input_state_dict)

        return [sample_dict, target_dict]
Пример #11
0
class LeabraFunction(Function_Base):
    """
    LeabraFunction(             \
        default_variable=None,  \
        network=None,           \
        params=None,            \
        owner=None,             \
        prefs=None)

    .. _LeabraFunction:

    LeabraFunction is a custom function that lives inside the LeabraMechanism. As a function, it transforms the
    variable by providing it as input to the leabra network inside the LeabraFunction.

    Arguments
    ---------

    default_variable : number or np.array : default np.zeros() (array of zeros)
        specifies a template for the input to the leabra network.

    network : leabra.Network
        specifies the leabra network to be used.

    params : Dict[param keyword: param value] : default None
        a `parameter dictionary <ParameterPort_Specification>` that specifies the parameters for the
        function.  Values specified for parameters in the dictionary override any assigned to those parameters in
        arguments of the constructor.

    owner : Component
        `component <Component>` to which to assign the Function.

    Attributes
    ----------

    variable : number or np.array
        contains value to be transformed.

    network : leabra.Network
        the leabra network that is being used

    owner : Mechanism
        `component <Component>` to which the Function has been assigned.

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the LeabraMechanism; if it is not specified in the **prefs** argument of the
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see `Preferences`
        for details).

    """

    componentType = LEABRA_FUNCTION_TYPE
    componentName = LEABRA_FUNCTION

    classPreferences = {
        PREFERENCE_SET_NAME: 'LeabraFunctionClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    class Parameters(Function_Base.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <LeabraFunction.variable>`

                    :default value: numpy.array([[0], [0]])
                    :type: ``numpy.ndarray``
                    :read only: True

                network
                    see `network <LeabraFunction.network>`

                    :default value: None
                    :type:
        """
        variable = Parameter(np.array([[0], [0]]),
                             read_only=True,
                             pnl_internal=True,
                             constructor_argument='default_variable')
        network = None

    def __init__(self,
                 default_variable=None,
                 network=None,
                 params=None,
                 owner=None,
                 prefs=None):
        """Validate that a network was supplied and derive a default variable from its layer sizes."""

        # The function is meaningless without a leabra network to run, so fail fast.
        if network is None:
            raise LeabraError(
                'network was None. Cannot create function for Leabra Mechanism if network is not specified.'
            )

        # Default variable is [input vector, output vector], sized from the
        # network's first and last layers respectively.
        if default_variable is None:
            input_size = len(network.layers[0].units)
            output_size = len(network.layers[-1].units)
            default_variable = [np.zeros(input_size), np.zeros(output_size)]

        super().__init__(
            default_variable=default_variable,
            network=network,
            params=params,
            owner=owner,
            prefs=prefs,
        )

    def _validate_variable(self, variable, context=None):
        """Check that variable is numeric and compatible with the network's layer sizes.

        When the owner is not training (no owner, no training_flag, or flag is False), only
        the input length is checked; when training, both input and target lengths are checked.
        Returns the (unmodified) variable.
        """
        if not isinstance(variable, (list, np.ndarray, numbers.Number)):
            raise LeabraError(
                "Input Error: the input variable ({}) was of type {}, but instead should be a list, "
                "numpy array, or number.".format(variable, type(variable)))

        input_size = len(self.network.layers[0].units)
        output_size = len(self.network.layers[-1].units)
        if (not hasattr(self, "owner")) or (not hasattr(
                self.owner,
                "training_flag")) or self.owner.training_flag is False:
            if len(convert_to_2d_input(variable)[0]) != input_size:
                # convert_to_2d_input(variable)[0] is just in case variable is a 2D array rather than a vector
                raise LeabraError(
                    "Input Error: the input was {}, which was of an incompatible length with the "
                    "input_size, which should be {}.".format(
                        convert_to_2d_input(variable)[0], input_size))
        else:
            # Training mode: variable must carry both an input pattern and a target pattern.
            if len(convert_to_2d_input(variable)[0]) != input_size or len(
                    convert_to_2d_input(variable)[1]) != output_size:
                raise LeabraError(
                    "Input Error: the input variable was {}, which was of an incompatible length with "
                    "the input_size or output_size, which should be {} and {} respectively."
                    .format(variable, input_size, output_size))
        return variable

    def _validate_params(self, request_set, target_set=None, context=None):
        """Require the NETWORK param to be a leabra.Network before deferring to the base validator."""
        if not isinstance(request_set[NETWORK], leabra.Network):
            raise LeabraError(
                "Error: the network given ({}) was of type {}, but instead must be a leabra Network."
                .format(request_set[NETWORK], type(request_set[NETWORK])))
        super()._validate_params(request_set, target_set, context)

    def _function(
        self,
        variable=None,
        context=None,
        params=None,
    ):
        """Run (or train) the leabra network on `variable` and return the output pattern."""
        network = self.parameters.network._get(context)
        # HACK: otherwise the INITIALIZING function executions impact the state of the leabra network
        if self.is_initializing:
            output_size = len(network.layers[-1].units)
            return np.zeros(output_size)

        try:
            training_flag = self.owner.parameters.training_flag._get(context)
        except AttributeError:
            training_flag = False

        # None or False
        if not training_flag:
            # Unwrap a 2D variable down to the input vector.
            if isinstance(variable[0], (list, np.ndarray)):
                variable = variable[0]
            return run_leabra_network(network, input_pattern=variable)

        else:
            # variable = convert_to_2d_input(variable)  # FIX: buggy, doesn't handle lists well
            if len(variable) != 2:
                raise LeabraError(
                    "Input Error: the input given ({}) for training was not the right format: the input "
                    "should be a 2D array containing two vectors, corresponding to the input and the "
                    "training target.".format(variable))
            if len(variable[0]) != len(network.layers[0].units) or len(
                    variable[1]) != len(network.layers[-1].units):
                # BUG FIX: the second format slot previously received the layer object itself
                # (network.layers[0]) instead of its unit count, producing a misleading message.
                raise LeabraError(
                    "Input Error: the input given ({}) was not the right format: it should be a 2D array "
                    "containing two vectors, corresponding to the input (which should be length {}) and "
                    "the training target (which should be length {})".format(
                        variable, len(network.layers[0].units),
                        len(network.layers[-1].units)))
            return train_leabra_network(network,
                                        input_pattern=variable[0],
                                        output_pattern=variable[1])
Пример #12
0
class ArgumentTherapy(Function_Base):
    """
    ArgumentTherapy(                   \
         variable,                     \
         propensity=Manner.CONTRARIAN, \
         pertinacity=10.0              \
         params=None,                  \
         owner=None,                   \
         name=None,                    \
         prefs=None                    \
         )

    .. _ArgumentTherapist:

    Return `True` or :keyword:`False` according to the manner of the therapist.

    Arguments
    ---------

    variable : boolean or statement that resolves to one : default class_defaults.variable
        assertion for which a therapeutic response will be offered.

    propensity : Manner value : default Manner.CONTRARIAN
        specifies preferred therapeutic manner

    pertinacity : float : default 10.0
        specifies therapeutic consistency

    params : Dict[param keyword: param value] : default None
        a `parameter dictionary <ParameterPort_Specification>` that specifies the parameters for the
        function.  Values specified for parameters in the dictionary override any assigned to those parameters in
        arguments of the constructor.

    owner : Component
        `component <Component>` to which to assign the Function.

    name : str : default see `name <Function.name>`
        specifies the name of the Function.

    prefs : PreferenceSet or specification dict : default Function.classPreferences
        specifies the `PreferenceSet` for the Function (see `prefs <Function_Base.prefs>` for details).


    Attributes
    ----------

    variable : boolean
        assertion to which a therapeutic response is made.

    propensity : Manner value : default Manner.CONTRARIAN
        determines therapeutic manner:  tendency to agree or disagree.

    pertinacity : float : default 10.0
        determines consistency with which the manner complies with the propensity.

    owner : Component
        `component <Component>` to which the Function has been assigned.

    name : str
        the name of the Function; if it is not specified in the **name** argument of the constructor, a
        default is assigned by FunctionRegistry (see `Naming` for conventions used for default and duplicate names).

    prefs : PreferenceSet or specification dict : Function.classPreferences
        the `PreferenceSet` for function; if it is not specified in the **prefs** argument of the Function's
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet
        <LINK>` for details).


    """

    # Function componentName and type (defined at top of module)
    componentName = ARGUMENT_THERAPY_FUNCTION
    componentType = EXAMPLE_FUNCTION_TYPE

    classPreferences = {
        PREFERENCE_SET_NAME: 'ExampleClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE),
    }

    # Mode indicators
    class Manner(Enum):
        OBSEQUIOUS = 0
        CONTRARIAN = 1

    # Parameter class defaults
    # These are used both to type-cast the params, and as defaults if none are assigned
    #  in the initialization call or later (using either _instantiate_defaults or during a function call)

    paramClassDefaults = Function_Base.paramClassDefaults.copy()
    paramClassDefaults.update({
                               PARAMETER_PORT_PARAMS: None
                               # PROPENSITY: Manner.CONTRARIAN,
                               # PERTINACITY:  10
                               })

    def __init__(self,
                 default_variable=None,
                 # BUG FIX: the defaults were swapped (propensity=10.0,
                 # pertincacity=Manner.CONTRARIAN), contradicting the documented
                 # contract above and the _validate_params checks below.
                 # NOTE: the keyword 'pertincacity' is misspelled but retained
                 # for backward compatibility with existing callers.
                 propensity=Manner.CONTRARIAN,
                 pertincacity=10.0,
                 params=None,
                 owner=None,
                 prefs: is_pref_set = None):

        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(propensity=propensity,
                                                  pertinacity=pertincacity,
                                                  params=params)

        super().__init__(default_variable=default_variable,
                         params=params,
                         owner=owner,
                         prefs=prefs,
                         )

    def _validate_variable(self, variable, context=None):
        """Validates variable and returns validated value

        This overrides the class method, to perform more detailed type checking
        See explanation in class method.
        Note: this method (or the class version) is called only if the parameter_validation attribute is `True`

        :param variable: (anything but a dict) - variable to be validated:
        :param context: (str)
        :return variable: - validated
        """

        # Exact-type match with the class default, with a numeric/numeric relaxation.
        if type(variable) == type(self.class_defaults.variable) or \
                (isinstance(variable, numbers.Number) and isinstance(self.class_defaults.variable, numbers.Number)):
            return variable
        else:
            raise FunctionError(f"Variable must be {type(self.class_defaults.variable)}.")

    def _validate_params(self, request_set, target_set=None, context=None):
        """Validates variable and /or params and assigns to targets

        This overrides the class method, to perform more detailed type checking
        See explanation in class method.
        Note: this method (or the class version) is called only if the parameter_validation attribute is `True`

        :param request_set: (dict) - params to be validated
        :param target_set: (dict) - destination of validated params
        :return none:
        """

        message = ""

        # Check params
        for param_name, param_value in request_set.items():

            if param_name == PROPENSITY:
                if isinstance(param_value, ArgumentTherapy.Manner):
                    # target_set[self.PROPENSITY] = param_value
                    pass  # This leaves param in request_set, clear to be assigned to target_set in call to super below
                else:
                    # BUG FIX: was 'message = ...', which could overwrite an
                    # error already accumulated for another param (dict order).
                    message += "Propensity must be of type Example.Mode"
                continue

            # Validate param
            if param_name == PERTINACITY:
                if isinstance(param_value, numbers.Number) and 0 <= param_value <= 10:
                    # target_set[PERTINACITY] = param_value
                    pass  # This leaves param in request_set, clear to be assigned to target_set in call to super below
                else:
                    message += "Pertinacity must be a number between 0 and 10"
                continue

        if message:
            raise FunctionError(message)

        super()._validate_params(request_set, target_set, context)

    def _function(self,
                 variable=None,
                 context=None,
                 params=None,
                 ):
        """
        Returns a boolean that is (or tends to be) the same as or opposite the one passed in.

        Arguments
        ---------

        variable : boolean : default class_defaults.variable
           an assertion to which a therapeutic response is made.

        params : Dict[param keyword: param value] : default None
            a `parameter dictionary <ParameterPort_Specification>` that specifies the parameters for the
            function.  Values specified for parameters in the dictionary override any assigned to those parameters in
            arguments of the constructor.


        Returns
        -------

        therapeutic response : boolean

        """
        # Compute the function
        statement = variable
        propensity = self.get_current_function_param(PROPENSITY, context)
        pertinacity = self.get_current_function_param(PERTINACITY, context)
        # A random "whim" in [-10, 10]; pertinacity biases how often the manner wins.
        whim = randint(-10, 10)

        if propensity == self.Manner.OBSEQUIOUS:
            value = whim < pertinacity

        elif propensity == self.Manner.CONTRARIAN:
            value = whim > pertinacity

        else:
            raise FunctionError("This should not happen if parameter_validation == True;  check its value")

        return self.convert_output_type(value)
Пример #13
0
# Registry name for the instance-level default preference set.
INSTANCE_DEFAULT_PREFERENCES = 'InstanceDefaultPreferences'

# Level default preferences dicts:

# NOTE: this is a *set* (not a dict) of the preference keys that a
# BasePreferenceSet manages; the per-level dicts below map each of these
# keys to a PreferenceEntry(default_value, default_level).
BasePreferenceSetPrefs = {
    VERBOSE_PREF,
    PARAM_VALIDATION_PREF,
    REPORT_OUTPUT_PREF,
    LOG_PREF,
    DELIVERY_PREF,
    RUNTIME_PARAM_MODULATION_PREF
}

# Defaults applied at the COMPOSITION preference level
# (log/delivery prefs fall through to the CATEGORY level).
CompositionDefaultPreferencesDict = {
    PREFERENCE_SET_NAME: COMPOSITION_DEFAULT_PREFERENCES,
    VERBOSE_PREF: PreferenceEntry(False, PreferenceLevel.COMPOSITION),
    PARAM_VALIDATION_PREF: PreferenceEntry(True, PreferenceLevel.COMPOSITION),
    REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.COMPOSITION),
    LOG_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY),
    DELIVERY_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY),
    RUNTIME_PARAM_MODULATION_PREF: PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.COMPOSITION)}

# Defaults applied at the CATEGORY preference level.
CategoryDefaultPreferencesDict = {
    PREFERENCE_SET_NAME: CATEGORY_DEFAULT_PREFERENCES,
    VERBOSE_PREF: PreferenceEntry(False, PreferenceLevel.CATEGORY),
    PARAM_VALIDATION_PREF: PreferenceEntry(True, PreferenceLevel.CATEGORY),
    REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.CATEGORY),
    LOG_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY),
    DELIVERY_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY),
    RUNTIME_PARAM_MODULATION_PREF: PreferenceEntry(Modulation.MULTIPLY,PreferenceLevel.CATEGORY)}
Пример #14
0
class ProcessingMechanism(ProcessingMechanism_Base):
    """
    Implements instance of `ProcessingMechanism_Base <ProcessingMechanism>` subclass of `Mechanism <Mechanism>`.
    See `Mechanism <Mechanism_Class_Reference>` and `subclasses <ProcessingMechanism_Subtypes>` of ProcessingMechanism
    for arguments and additional attributes.

    Attributes
    ----------

    standard_output_ports : list[dict]
      list of the dictionary specifications for `StandardOutputPorts <OutputPort_Standard>` that can be assigned as
      `OutputPorts <OutputPort>`, in addition to the `standard_output_ports <Mechanism_Base.standard_output_ports>`
      of a `Mechanism <Mechanism>`; each assigns as the `value <OutputPort.value>` of the OutputPort a quantity
      calculated over the elements of the first item in the outermost dimension (axis 0) of the Mechanism`s `value
      <Mechanism_Base.value>`. `Subclasses <ProcessingMechanism_Subtypes>` of ProcessingMechanism may extend this
      list to include additional `StandardOutputPorts <OutputPort_Standard>`.

     *MEAN* : float
       mean of the elements.

     *MEDIAN* : float
       median of the elements.

     *STANDARD_DEVIATION* : float
       standard deviation of the elements.

     *VARIANCE* : float
       variance of the elements.

     *MAX_VAL* : float
       greatest signed value of the elements.

     *MAX_ABS_VAL* : float
       greatest absolute value of the elements.

     *MAX_ONE_HOT* : float
       element with the greatest signed value is assigned that value, all others are assigned 0.

     *MAX_ABS_ONE_HOT* : float
       element with the greatest absolute value is assigned that value, all others are assigned 0.

     *MAX_INDICATOR* : 1d array
       element with the greatest signed value is assigned 1, all others are assigned 0.

     *MAX_ABS_INDICATOR* : 1d array
       element with the greatest absolute value is assigned 1, all others are assigned 0.

     *PROB* : float
       element chosen probabilistically based on softmax distribution is assigned its value, all others are assigned 0.

    """

    componentType = PROCESSING_MECHANISM

    classPreferenceLevel = PreferenceLevel.TYPE
    # These will override those specified in TYPE_DEFAULT_PREFERENCES
    classPreferences = {
        PREFERENCE_SET_NAME: 'ProcessingMechanismCustomClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_ports: tc.optional(tc.any(list, dict)) = None,
                 output_ports: tc.optional(tc.any(str, Iterable)) = None,
                 function=Linear,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs):
        """Delegate all construction to ProcessingMechanism_Base; this subclass adds no behavior of its own."""
        # Modernized from super(ProcessingMechanism, self).__init__ to the
        # zero-argument Python 3 form; behavior is identical.
        super().__init__(default_variable=default_variable,
                         size=size,
                         input_ports=input_ports,
                         function=function,
                         output_ports=output_ports,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs)
Пример #15
0
class IntegratorMechanism(ProcessingMechanism_Base):
    """
    IntegratorMechanism(                   \
    default_variable=None,                 \
    size=None,                             \
    function=AdaptiveIntegrator(rate=0.5), \
    params=None,                           \
    name=None,                             \
    prefs=None)

    Subclass of `ProcessingMechanism <ProcessingMechanism>` that integrates its input.

    COMMENT:
        Description:
            - DOCUMENT:

        Class attributes:
            + componentType (str): SigmoidLayer
            + classPreference (PreferenceSet): SigmoidLayer_PreferenceSet, instantiated in __init__()
            + classPreferenceLevel (PreferenceLevel): PreferenceLevel.TYPE
            + class_defaults.variable (value):  SigmoidLayer_DEFAULT_BIAS
            + paramClassDefaults (dict): {FUNCTION_PARAMS:{kwSigmoidLayer_Unitst: kwSigmoidLayer_NetInput
                                                                     kwSigmoidLayer_Gain: SigmoidLayer_DEFAULT_GAIN
                                                                     kwSigmoidLayer_Bias: SigmoidLayer_DEFAULT_BIAS}}
        Class methods:
            None

        MechanismRegistry:
           All instances of SigmoidLayer are registered in MechanismRegistry, which maintains an entry for the subclass,
              a count for all instances of it, and a dictionary of those instances

    COMMENT

    Arguments
    ---------

    default_variable : number, list or np.ndarray
        the input to the Mechanism to use if none is provided in a call to its
        `execute <Mechanism_Base.execute>` or `run <Mechanism_Base.run>` methods;
        also serves as a template to specify the length of `variable <IntegratorMechanism.variable>` for
        `function <IntegratorMechanism.function>`, and the `primary outputState <OutputState_Primary>` of the
        Mechanism.

    size : int, list or np.ndarray of ints
        specifies default_variable as array(s) of zeros if **default_variable** is not passed as an argument;
        if **default_variable** is specified, it takes precedence over the specification of **size**.
        As an example, the following mechanisms are equivalent::
            T1 = TransferMechanism(size = [3, 2])
            T2 = TransferMechanism(default_variable = [[0, 0, 0], [0, 0]])

    function : IntegratorFunction : default IntegratorFunction
        specifies the function used to integrate the input.  Must take a single numeric value, or a list or np.array
        of values, and return one of the same form.

    params : Dict[param keyword: param value] : default None
        a `parameter dictionary <ParameterState_Specification>` that can be used to specify the parameters for
        the Mechanism, parameters for its `function <IntegratorMechanism.function>`, and/or a custom function and its
        parameters.  Values specified for parameters in the dictionary override any assigned to those parameters in
        arguments of the constructor.

    name : str : default see `name <IntegratorMechanism.name>`
        specifies the name of the IntegratorMechanism.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the IntegratorMechanism; see `prefs <IntegratorMechanism.prefs>` for details.

    Attributes
    ----------
    variable : value: default
        the input to Mechanism's ``function``.

    name : str
        the name of the IntegratorMechanism; if it is not specified in the **name** argument of the constructor, a
        default is assigned by MechanismRegistry (see `Naming` for conventions used for default and duplicate names).

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the IntegratorMechanism; if it is not specified in the **prefs** argument of the
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet
        <LINK>` for details).

    """

    componentType = INTEGRATOR_MECHANISM

    classPreferenceLevel = PreferenceLevel.TYPE
    # These will override those specified in TypeDefaultPreferences
    # NOTE(review): uses legacy pref keys (kwPreferenceSetName / kpReportOutputPref)
    # where other classes in this codebase use PREFERENCE_SET_NAME / REPORT_OUTPUT_PREF —
    # presumably aliases; confirm before unifying.
    classPreferences = {
        kwPreferenceSetName: 'IntegratorMechanismCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    class Parameters(ProcessingMechanism_Base.Parameters):
        """
            Attributes
            ----------

                function
                    see `function <IntegratorMechanism.function>`

                    :default value: `AdaptiveIntegrator`(initializer=numpy.array([0]), rate=0.5)
                    :type: `Function`

        """
        # NOTE: the default is a single AdaptiveIntegrator instance created at
        # class-definition time; the framework's Parameter machinery governs how
        # it is shared/copied across instances.
        function = Parameter(AdaptiveIntegrator(rate=0.5),
                             stateful=False,
                             loggable=False)

    paramClassDefaults = ProcessingMechanism_Base.paramClassDefaults.copy()
    # paramClassDefaults.update({
    #     OUTPUT_STATES:[PREDICTION_MECHANISM_OUTPUT]
    # })

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(tc.any(list, dict)) = None,
                 function=None,  # None falls back to the Parameters.function default above
                 output_states: tc.optional(tc.any(str, Iterable)) = RESULTS,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):
        """Assign type-level preferences, default input value (SigmoidLayer_DEFAULT_BIAS) and call super.__init__
        """

        # Assign args to params and functionParams dicts
        # (_assign_args_to_param_dicts introspects this call's arguments, so the
        # explicit keyword form here is load-bearing)
        params = self._assign_args_to_param_dicts(input_states=input_states,
                                                  output_states=output_states,
                                                  function=function,
                                                  params=params)

        super(IntegratorMechanism,
              self).__init__(default_variable=default_variable,
                             size=size,
                             function=function,
                             params=params,
                             name=name,
                             prefs=prefs,
                             context=ContextFlags.CONSTRUCTOR)
Пример #16
0
class PredictionErrorMechanism(ComparatorMechanism):
    """
    PredictionErrorMechanism(                                \
        sample,                                              \
        target,                                              \
        function=PredictionErrorDeltaFunction,               \
        output_states=[OUTCOME],                             \
        params=None,                                         \
        name=None,                                           \
        prefs=None)

    Calculates the prediction error between the predicted reward and the target

    Arguments
    ---------

    sample : OutputState, Mechanism_Base, dict, number, or str
        specifies the SAMPLE InputState, which will be evaluated by
        the function

    target : OutputState, Mechanism_Base, dict, number, or str
        specifies the TARGET InputState, which will be used by the function to
        evaluate the sample

    function : CombinationFunction, ObjectiveFunction, function, or method : default PredictionErrorDeltaFunction
        the function used to evaluate the sample and target inputs.

    output_states : str, Iterable : default OUTCOME
        by default, contains only the *OUTCOME* (`primary <OutputState_Primary>`)
        OutputState of the PredictionErrorMechanism.

    learning_rate : Number : default 0.3
        controls the weight of later timesteps compared to earlier ones. Higher
        rates weight later timesteps more heavily than previous ones.

    name : str
        the name of the PredictionErrorMechanism; if it is not specified in the
        **name** argument of the constructor, a default is assigned by
        MechanismRegistry (see `Naming` for conventions used for default and
        duplicate names).


    Attributes
    ----------

    sample : OutputState, Mechanism_Base, dict, number, or str
        specifies the SAMPLE InputState, which will be evaluated by
        the function

    target : OutputState, Mechanism_Base, dict, number, or str
        specifies the TARGET InputState, which will be used by the function to
        evaluate the sample

    function : CombinationFunction, ObjectiveFunction, Function, or method : default PredictionErrorDeltaFunction
        the function used to evaluate the sample and target inputs.

    output_states : str, Iterable : default OUTCOME
        by default, contains only the *OUTCOME* (`primary <OutputState_Primary>`)
        OutputState of the PredictionErrorMechanism.

    learning_rate : Number : default 0.3
        controls the weight of later timesteps compared to earlier ones. Higher
        rates weight later timesteps more heavily than previous ones.

    name : str
        the name of the PredictionErrorMechanism; if it is not specified in the
        **name** argument of the constructor, a default is assigned by
        MechanismRegistry (see `Naming` for conventions used for default and
        duplicate names).

    """
    componentType = PREDICTION_ERROR_MECHANISM

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    classPreferences = {
        kwPreferenceSetName: 'PredictionErrorMechanismCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    class Parameters(ComparatorMechanism.Parameters):
        """
            Attributes
            ----------

                variable
                    see `variable <PredictionErrorMechanism.variable>`

                    :default value: None
                    :type:
                    :read only: True

                function
                    see `function <PredictionErrorMechanism.function>`

                    :default value: `PredictionErrorDeltaFunction`
                    :type: `Function`

                learning_rate
                    see `learning_rate <PredictionErrorMechanism.learning_rate>`

                    :default value: 0.3
                    :type: float

        """
        variable = Parameter(None, read_only=True)
        learning_rate = Parameter(0.3, modulable=True)
        function = PredictionErrorDeltaFunction
        sample = None
        target = None

    paramClassDefaults = ComparatorMechanism.paramClassDefaults.copy()
    standard_output_states = ComparatorMechanism.standard_output_states.copy()

    @tc.typecheck
    def __init__(self,
                 sample: tc.optional(tc.any(OutputState, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 target: tc.optional(tc.any(OutputState, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 function=PredictionErrorDeltaFunction(),
                 output_states: tc.optional(tc.any(str, Iterable)) = OUTCOME,
                 learning_rate: is_numeric = 0.3,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=componentType + INITIALIZING):
        # SAMPLE and TARGET InputStates are specified positionally: [sample, target].
        input_states = [sample, target]
        # _assign_args_to_param_dicts introspects this call's arguments, so the
        # explicit keyword form here is load-bearing.
        params = self._assign_args_to_param_dicts(sample=sample,
                                                  target=target,
                                                  function=function,
                                                  input_states=input_states,
                                                  output_states=output_states,
                                                  learning_rate=learning_rate,
                                                  params=params)

        super().__init__(sample=sample,
                         target=target,
                         input_states=input_states,
                         function=function,
                         output_states=output_states,
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=context)

    def _parse_function_variable(self, variable, execution_id=None, context=None):
        # TODO: update to take sample/reward from variable
        # sample = x(t) in Montague on first run, V(t) on subsequent runs
        # NOTE: the `variable` argument is intentionally ignored; the function's
        # input is read directly from the SAMPLE and TARGET InputState values.
        sample = self.input_states[SAMPLE].parameters.value.get(execution_id)
        reward = self.input_states[TARGET].parameters.value.get(execution_id)

        return [sample, reward]

    def _execute(self, variable=None, execution_id=None, runtime_params=None, context=None):
        # Compute the delta via the superclass (PredictionErrorDeltaFunction), then
        # shift it one timestep earlier: drop the first element of the first row
        # and pad with a trailing 0 so the output length is preserved.
        delta = super()._execute(variable=variable, execution_id=execution_id, runtime_params=runtime_params, context=context)
        delta = delta[0][1:]
        # delta = delta[1:]
        delta = np.append(delta, 0)
        return delta
Пример #17
0
from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel
from psyneulink.core.globals.utilities import Modulation

__all__ = [
    'MechanismPreferenceSet', 'runtimeParamModulationPrefCategoryDefault',
    'runtimeParamModulationPrefInstanceDefault',
    'runtimeParamModulationPrefTypeDefault'
]

# Default PreferenceEntry values for the runtimeParamModulation preference,
# one per preference level.
runtimeParamModulationPrefInstanceDefault = PreferenceEntry(Modulation.OVERRIDE, PreferenceLevel.INSTANCE)
runtimeParamModulationPrefTypeDefault = PreferenceEntry(Modulation.ADD, PreferenceLevel.TYPE)
runtimeParamModulationPrefCategoryDefault = PreferenceEntry(False, PreferenceLevel.CATEGORY)

# Instance-level defaults for the remaining preferences (all disabled).
reportOutputPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)
logPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)
verbosePrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)
paramValidationPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)


class MechanismPreferenceSet(BasePreferenceSet):
Пример #18
0
class CompositionInterfaceMechanism(ProcessingMechanism_Base):
    """
    CompositionInterfaceMechanism(  \
        function=Identity())

    Subclass of `ProcessingMechanism <ProcessingMechanism>` that acts as interface between a Composition and its
    inputs from and outputs to the environment or other Mechanisms (if it is a nested Composition).

    See `Mechanism <Mechanism_Class_Reference>` for arguments and additional attributes.

    Attributes
    ----------

    function : InterfaceFunction : default Identity
        the function used to transform the variable before assigning it to the Mechanism's OutputPort(s)

    """

    componentType = COMPOSITION_INTERFACE_MECHANISM
    outputPortTypes = [OutputPort, ControlSignal]

    classPreferenceLevel = PreferenceLevel.TYPE
    # These will override those specified in TYPE_DEFAULT_PREFERENCES
    classPreferences = {
        PREFERENCE_SET_NAME: 'CompositionInterfaceMechanismCustomClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE)}

    class Parameters(ProcessingMechanism_Base.Parameters):
        """
            Attributes
            ----------

                function
                    see `function <CompositionInterfaceMechanism.function>`

                    :default value: `Identity`
                    :type: `Function`
        """
        function = Parameter(Identity, stateful=False, loggable=False)

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 size=None,
                 # FIX: original spec nested tc.optional twice; a single
                 # tc.optional is equivalent (None is already accepted).
                 input_ports: tc.optional(tc.any(Iterable, Mechanism, OutputPort, InputPort)) = None,
                 function=None,
                 composition=None,
                 port_map=None,
                 params=None,
                 name=None,
                 prefs:is_pref_set=None):
        """Initialize the CIM and its bookkeeping for the owning Composition.

        composition : the Composition this CIM interfaces for
        port_map : mapping between ports of the CIM and the ports it interfaces
        """
        if default_variable is None and size is None:
            default_variable = self.class_defaults.variable
        self.composition = composition
        self.port_map = port_map
        self.connected_to_composition = False
        # Ports added directly by the user (rather than by the Composition);
        # tracked so remove_ports can keep this bookkeeping in sync.
        self.user_added_ports = {
            INPUT_PORTS: set(),
            OUTPUT_PORTS: set()
        }
        super(CompositionInterfaceMechanism, self).__init__(default_variable=default_variable,
                                                            size=size,
                                                            input_ports=input_ports,
                                                            function=function,
                                                            params=params,
                                                            name=name,
                                                            prefs=prefs,
                                                            )

    @handle_external_context()
    def add_ports(self, ports, context=None):
        """Add ports to the CIM, warning when called from the command line.

        Ports added by the user (COMMAND_LINE context) are recorded in
        `user_added_ports` so they can be distinguished from ports managed
        by the Composition itself.
        """
        ports = super(CompositionInterfaceMechanism, self).add_ports(ports, context=context)
        if context.source == ContextFlags.COMMAND_LINE:
            warnings.warn(
                'You are attempting to add custom ports to a CIM, which can result in unpredictable behavior and '
                'is therefore recommended against. If suitable, you should instead add ports to the mechanism(s) '
                'that project to or are projected to from the CIM.')
            if ports[INPUT_PORTS]:
                self.user_added_ports[INPUT_PORTS].update([port for port in ports[INPUT_PORTS].data])
            if ports[OUTPUT_PORTS]:
                self.user_added_ports[OUTPUT_PORTS].update([port for port in ports[OUTPUT_PORTS].data])
        return ports

    @handle_external_context()
    def remove_ports(self, ports, context=None):
        """Remove ports from the CIM and prune stale `user_added_ports` entries."""
        super(CompositionInterfaceMechanism, self).remove_ports(ports, context)
        # Keep only the user-added ports that still exist on the mechanism
        # (replaces the original pair of duplicated mark-and-sweep loops).
        self.user_added_ports[INPUT_PORTS] = {
            port for port in self.user_added_ports[INPUT_PORTS]
            if port in self.input_ports
        }
        self.user_added_ports[OUTPUT_PORTS] = {
            port for port in self.user_added_ports[OUTPUT_PORTS]
            if port in self.output_ports
        }
Пример #19
0
class ProcessingMechanism(ProcessingMechanism_Base):
    """
    ProcessingMechanism(         \
        default_variable=None,   \
        size=None,               \
        function=Linear,         \
        params=None,             \
        name=None,               \
        prefs=None)

    Subclass of `ProcessingMechanism <ProcessingMechanism>` that does not have any specialized features.

    Arguments
    ---------

    default_variable : number, list or np.ndarray
        the input to the Mechanism to use if none is provided in a call to its
        `execute <Mechanism_Base.execute>` or `run <Mechanism_Base.run>` methods;
        also serves as a template to specify the length of `variable <ProcessingMechanism.variable>` for
        `function <ProcessingMechanism.function>`, and the `primary outputState <OutputState_Primary>` of the
        Mechanism.

    size : int, list or np.ndarray of ints
        specifies default_variable as array(s) of zeros if **default_variable** is not passed as an argument;
        if **default_variable** is specified, it takes precedence over the specification of **size**.
        As an example, the following mechanisms are equivalent::
            P1 = ProcessingMechanism(size = [3, 2])
            P2 = ProcessingMechanism(default_variable = [[0, 0, 0], [0, 0]])

    function : PsyNeuLink Function : default Linear
        specifies the function used to compute the output

    params : Dict[param keyword: param value] : default None
        a `parameter dictionary <ParameterState_Specification>` that can be used to specify the parameters for
        the Mechanism, parameters for its `function <ProcessingMechanism.function>`, and/or a custom function and its
        parameters.  Values specified for parameters in the dictionary override any assigned to those parameters in
        arguments of the constructor.

    name : str : default see `name <ProcessingMechanism.name>`
        specifies the name of the ProcessingMechanism.

    prefs : PreferenceSet or specification dict : default Mechanism.classPreferences
        specifies the `PreferenceSet` for the ProcessingMechanism; see `prefs <ProcessingMechanism.prefs>` for details.

    Attributes
    ----------
    variable : value: default
        the input to Mechanism's `function`.

    name : str
        the name of the ProcessingMechanism; if it is not specified in the **name** argument of the constructor, a
        default is assigned by MechanismRegistry (see `Naming` for conventions used for default and duplicate names).

    prefs : PreferenceSet or specification dict
        the `PreferenceSet` for the ProcessingMechanism; if it is not specified in the **prefs** argument of the
        constructor, a default is assigned using `classPreferences` defined in __init__.py (see :doc:`PreferenceSet
        <LINK>` for details).

    """

    componentType = PROCESSING_MECHANISM

    class Parameters(ProcessingMechanism_Base.Parameters):
        """
            Attributes
            ----------

                execution_count
                    see `execution_count <ProcessingMechanism.execution_count>`

                    :default value: 0
                    :type: int
                    :read only: True

        """
        # Not stateful: this is just a global counter; for accurate per-context
        # counts, use schedulers, which store this info themselves.
        execution_count = Parameter(0,
                                    read_only=True,
                                    loggable=False,
                                    stateful=False,
                                    fallback_default=True)

    classPreferenceLevel = PreferenceLevel.TYPE
    # These will override those specified in TypeDefaultPreferences
    classPreferences = {
        kwPreferenceSetName: 'ProcessingMechanismCustomClassPreferences',
        kpReportOutputPref: PreferenceEntry(False, PreferenceLevel.INSTANCE),
    }

    paramClassDefaults = ProcessingMechanism_Base.paramClassDefaults.copy()

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(tc.any(list, dict)) = None,
                 output_states: tc.optional(tc.any(str, Iterable)) = None,
                 function=Linear,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):
        """Collect constructor arguments into param dicts and delegate to the base class."""
        params = self._assign_args_to_param_dicts(function=function,
                                                  input_states=input_states,
                                                  output_states=output_states,
                                                  params=params)

        super().__init__(default_variable=default_variable,
                         size=size,
                         input_states=input_states,
                         function=function,
                         output_states=output_states,
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=ContextFlags.CONSTRUCTOR)
#
#
# ***************************************** CompositionPreferenceSet ***************************************************

from psyneulink.core.globals.preferences.basepreferenceset import BasePreferenceSet, BasePreferenceSetPrefs
from psyneulink.core.globals.preferences.preferenceset import PreferenceEntry, PreferenceLevel

__all__ = [
    'CompositionPreferenceSet', 'recordSimulationPrefCategoryDefault', 'recordSimulationPrefInstanceDefault',
    'recordSimulationPrefTypeDefault', 'RECORD_SIMULATION_PREF'
]

# Keypath for the recordSimulation preference.
# (Original had a redundant chained assignment: `X = X = '...'`.)
RECORD_SIMULATION_PREF = '_record_simulation_pref'

# Defaults for recordSimulationPref (disabled at every level):
recordSimulationPrefInstanceDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)
recordSimulationPrefTypeDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)
recordSimulationPrefCategoryDefault = PreferenceEntry(False, PreferenceLevel.INSTANCE)

# Composition preferences = base preferences plus recordSimulation.
CompositionPreferenceSetPrefs = BasePreferenceSetPrefs.copy()
CompositionPreferenceSetPrefs.add(RECORD_SIMULATION_PREF)


def is_composition_pref(pref):
    """Return True if *pref* names a preference recognized by CompositionPreferenceSet."""
    known_prefs = CompositionPreferenceSetPrefs
    return pref in known_prefs


def is_composition_pref_set(pref):
    if pref is None:
        return True
    if isinstance(pref, (BasePreferenceSet, type(None))):
Пример #21
0
class IntegratorMechanism(ProcessingMechanism_Base):
    """
    IntegratorMechanism( \
        function=AdaptiveIntegrator(rate=0.5))

    Subclass of `ProcessingMechanism <ProcessingMechanism>` that integrates its input.
    See `Mechanism <Mechanism_Class_Reference>` for additional arguments and attributes.


    Arguments
    ---------

    function : IntegratorFunction : default IntegratorFunction
        specifies the function used to integrate the input.  Must take a single numeric value, or a list or np.array
        of values, and return one of the same form.

    """

    componentType = INTEGRATOR_MECHANISM

    classPreferenceLevel = PreferenceLevel.TYPE
    # These will override those specified in TYPE_DEFAULT_PREFERENCES
    classPreferences = {
        PREFERENCE_SET_NAME: 'IntegratorMechanismCustomClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    class Parameters(ProcessingMechanism_Base.Parameters):
        """
            Attributes
            ----------

                function
                    see `function <IntegratorMechanism.function>`

                    :default value: `AdaptiveIntegrator`(initializer=numpy.array([0]), rate=0.5)
                    :type: `Function`
        """
        function = Parameter(AdaptiveIntegrator(rate=0.5),
                             stateful=False,
                             loggable=False)

    @tc.typecheck
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_ports: tc.optional(tc.any(list, dict)) = None,
                 function=None,
                 output_ports: tc.optional(tc.any(str, Iterable)) = None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs):
        """Instantiate the IntegratorMechanism by delegating to ProcessingMechanism_Base.__init__.
        """

        super(IntegratorMechanism,
              self).__init__(default_variable=default_variable,
                             size=size,
                             function=function,
                             params=params,
                             name=name,
                             prefs=prefs,
                             input_ports=input_ports,
                             output_ports=output_ports,
                             **kwargs)

        # IMPLEMENT: INITIALIZE LOG ENTRIES, NOW THAT ALL PARTS OF THE MECHANISM HAVE BEEN INSTANTIATED

    def _handle_default_variable(self,
                                 default_variable=None,
                                 size=None,
                                 input_ports=None,
                                 function=None,
                                 params=None):
        """If any parameters with len>1 have been specified for the Mechanism's function, and Mechanism's
        default_variable has not been specified, reshape Mechanism's variable to match function's,
        but make sure function's has the same outer dimensionality as the Mechanism's
        """

        # Get variable for Mechanism; track whether the user supplied it, since
        # a user-specified variable must never be silently reshaped below.
        user_specified = False
        if default_variable is not None:
            variable = np.atleast_1d(default_variable)
            user_specified = True
        else:
            variable = self.parameters.variable.default_value
            user_specified = self.parameters.variable._user_specified

        # Only bother if an instantiated function was specified for the Mechanism
        if isinstance(function, Function):
            # Compare the sizes of the innermost dimensions only.
            function_variable = function.parameters.variable.default_value
            function_variable_len = function_variable.shape[-1]
            variable_len = variable.shape[-1]

            # Raise error if:
            # - the length of both Mechanism and function variable are greater than 1 and they don't match, or
            # - the Mechanism's variable length is 1 and the function's is > 1 (in which case would like to assign
            #   shape of function's variable to that of Mechanism) but Mechanism's variable is user-specified.
            if ((variable_len > 1 and function_variable_len > 1
                 and variable_len != function_variable_len)
                    or (function_variable_len > 1 and variable_len == 1
                        and user_specified)):
                raise IntegratorMechanismError(
                    f"Shape of {repr(VARIABLE)} for function specified for {self.name} "
                    f"({function.name}: {function.variable.shape}) does not match "
                    f"the shape of the {repr(DEFAULT_VARIABLE)} specified for the "
                    f"{repr(Mechanism.__name__)}.")

            # If length of Mechanism's variable is 1 but the function's is longer,
            #     reshape Mechanism's inner dimension to match function
            elif variable_len == 1 and function_variable_len > 1:
                variable_shape = list(variable.shape)
                variable_shape[-1] = function_variable.shape[-1]
                variable = np.zeros(tuple(variable_shape))

            # IMPLEMENTATION NOTE:
            #    Don't worry about case in which length of function's variable is 1 and Mechanism's is > 1
            #    as the reshaping of the function's variable will be taken care of in _instantiate_function

        return super()._handle_default_variable(default_variable=variable,
                                                size=size,
                                                input_ports=input_ports,
                                                function=function,
                                                params=params)
Пример #22
0
TYPE_DEFAULT_PREFERENCES = 'TypeDefaultPreferences'
SUBTYPE_DEFAULT_PREFERENCES = 'SubtypeDefaultPreferences'
INSTANCE_DEFAULT_PREFERENCES = 'InstanceDefaultPreferences'

# Level default preferences dicts:

# Preference keywords included in every BasePreferenceSet.
BasePreferenceSetPrefs = {
    VERBOSE_PREF,
    PARAM_VALIDATION_PREF,
    REPORT_OUTPUT_PREF,
    LOG_PREF,
    DELIVERY_PREF,
    RUNTIME_PARAM_MODULATION_PREF,
}

# Default preference entries at the Composition level.
CompositionDefaultPreferencesDict = {
    PREFERENCE_SET_NAME: COMPOSITION_DEFAULT_PREFERENCES,
    VERBOSE_PREF: PreferenceEntry(False, PreferenceLevel.COMPOSITION),
    PARAM_VALIDATION_PREF: PreferenceEntry(True, PreferenceLevel.COMPOSITION),
    REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.COMPOSITION),
    LOG_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY),
    DELIVERY_PREF: PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY),
    RUNTIME_PARAM_MODULATION_PREF: PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.COMPOSITION),
}

CategoryDefaultPreferencesDict = {
    PREFERENCE_SET_NAME:
    CATEGORY_DEFAULT_PREFERENCES,
Пример #23
0
def test_EVC_gratton():
    """Regression test of EVC control on a Gratton-style flanker task.

    Builds target/flanker control and automatic pathways converging on a DDM
    decision mechanism, adds an EVC controller over the two control signals,
    runs 3 trials, and compares both the trial results and the recorded
    simulation results against reference values.
    """
    # (Removed two unused inner helpers, test_search_function and
    # test_outcome_function, that were never referenced.)

    # Preferences:
    mechanism_prefs = ComponentPreferenceSet(
        prefs={
            kpVerbosePref: PreferenceEntry(False, PreferenceLevel.INSTANCE),
            kpReportOutputPref: PreferenceEntry(False,
                                                PreferenceLevel.INSTANCE)
        })

    process_prefs = ComponentPreferenceSet(
        reportOutput_pref=PreferenceEntry(False, PreferenceLevel.INSTANCE),
        verbose_pref=PreferenceEntry(True, PreferenceLevel.INSTANCE))

    # Control parameters: allocation values the EVC controller searches over
    signalSearchRange = np.arange(1.0, 2.0, 0.2)

    # Stimulus Mechanisms
    Target_Stim = TransferMechanism(name='Target Stimulus',
                                    function=Linear(slope=0.3324))
    Flanker_Stim = TransferMechanism(name='Flanker Stimulus',
                                     function=Linear(slope=0.3545221843))

    # Processing Mechanisms (Control): slopes are modulated by ControlProjections
    Target_Rep = TransferMechanism(
        name='Target Representation',
        function=Linear(
            slope=(1.0,
                   ControlProjection(function=Linear,
                                     control_signal_params={
                                         ALLOCATION_SAMPLES: signalSearchRange
                                     }))),
        prefs=mechanism_prefs)
    Flanker_Rep = TransferMechanism(
        name='Flanker Representation',
        function=Linear(
            slope=(1.0,
                   ControlProjection(function=Linear,
                                     control_signal_params={
                                         ALLOCATION_SAMPLES: signalSearchRange
                                     }))),
        prefs=mechanism_prefs)

    # Processing Mechanism (Automatic)
    Automatic_Component = TransferMechanism(name='Automatic Component',
                                            function=Linear(slope=(1.0)),
                                            prefs=mechanism_prefs)

    # Decision Mechanism
    Decision = DDM(
        function=DriftDiffusionAnalytical(drift_rate=(1.0),
                                          threshold=(0.2645),
                                          noise=(0.5),
                                          starting_point=(0),
                                          t0=0.15),
        prefs=mechanism_prefs,
        name='Decision',
        output_states=[
            DECISION_VARIABLE, RESPONSE_TIME, PROBABILITY_UPPER_THRESHOLD
        ],
    )

    # Outcome Mechanism
    Reward = TransferMechanism(name='Reward')

    # Processes:
    TargetControlProcess = Process(default_variable=[0],
                                   pathway=[Target_Stim, Target_Rep, Decision],
                                   prefs=process_prefs,
                                   name='Target Control Process')

    FlankerControlProcess = Process(
        default_variable=[0],
        pathway=[Flanker_Stim, Flanker_Rep, Decision],
        prefs=process_prefs,
        name='Flanker Control Process')

    TargetAutomaticProcess = Process(
        default_variable=[0],
        pathway=[Target_Stim, Automatic_Component, Decision],
        prefs=process_prefs,
        name='Target Automatic Process')

    FlankerAutomaticProcess = Process(
        default_variable=[0],
        pathway=[Flanker_Stim, Automatic_Component, Decision],
        prefs=process_prefs,
        name='Flanker1 Automatic Process')

    RewardProcess = Process(default_variable=[0],
                            pathway=[Reward],
                            prefs=process_prefs,
                            name='RewardProcess')

    # System with an EVC controller monitoring reward and P(upper threshold)
    mySystem = System(
        processes=[
            TargetControlProcess, FlankerControlProcess,
            TargetAutomaticProcess, FlankerAutomaticProcess, RewardProcess
        ],
        controller=EVCControlMechanism,
        enable_controller=True,
        monitor_for_control=[
            Reward, (Decision.PROBABILITY_UPPER_THRESHOLD, 1, -1)
        ],
        name='EVC Gratton System')

    # Record simulation results so they can be checked below
    mySystem.recordSimulationPref = True

    # Show characteristics of system:
    mySystem.show()
    mySystem.controller.show()

    # Configure EVC components: exponential cost on control-signal intensity
    mySystem.controller.control_signals[
        0].intensity_cost_function = Exponential(rate=0.8046).function
    mySystem.controller.control_signals[
        1].intensity_cost_function = Exponential(rate=0.8046).function

    # Use non-integrating predictions (rate=1.0) for stimuli and reward
    for mech in mySystem.controller.prediction_mechanisms.mechanisms:
        if mech.name == 'Flanker Stimulus Prediction Mechanism' or mech.name == 'Target Stimulus Prediction Mechanism':
            # when you find a key mechanism (transfer mechanism) with the correct name, print its name
            print(mech.name)
            mech.function.rate = 1.0

        if 'Reward' in mech.name:
            print(mech.name)
            mech.function.rate = 1.0

    print('new rate of integration mechanisms before System execution:')
    for mech in mySystem.controller.prediction_mechanisms.mechanisms:
        print(mech.name)
        print(mech.function.rate)
        print('----')

    # Stimulus environment: 3 trials; trials 1 and 3 have identical inputs
    nTrials = 3
    targetFeatures = [1, 1, 1]
    flankerFeatures = [1, -1, 1]  # for full simulation: flankerFeatures = [-1,1]
    reward = [100, 100, 100]

    stim_list_dict = {
        Target_Stim: targetFeatures,
        Flanker_Stim: flankerFeatures,
        Reward: reward
    }

    mySystem.controller.reportOutputPref = True

    # One (decision variable, RT, P(upper threshold), reward) quadruple per trial
    expected_results_array = [
        0.2645, 0.32257752863413636, 0.9481940753514433, 100.,
        0.2645, 0.42963678062444666, 0.47661180945923376, 100.,
        0.2645, 0.300291026852769, 0.97089165101931, 100.
    ]

    # 25 simulations (the 5x5 allocation grid) are recorded per trial, one
    # quadruple each; trials 1 and 3 have identical inputs, so their
    # simulation results are identical as well.
    sim_results_trial_1 = [
        0.2645, 0.32257753, 0.94819408, 100.,
        0.2645, 0.31663196, 0.95508757, 100.,
        0.2645, 0.31093566, 0.96110142, 100.,
        0.2645, 0.30548947, 0.96633839, 100.,
        0.2645, 0.30029103, 0.97089165, 100.,
        0.2645, 0.3169957, 0.95468427, 100.,
        0.2645, 0.31128378, 0.9607499, 100.,
        0.2645, 0.30582202, 0.96603252, 100.,
        0.2645, 0.30060824, 0.9706259, 100.,
        0.2645, 0.29563774, 0.97461444, 100.,
        0.2645, 0.31163288, 0.96039533, 100.,
        0.2645, 0.30615555, 0.96572397, 100.,
        0.2645, 0.30092641, 0.97035779, 100.,
        0.2645, 0.2959409, 0.97438178, 100.,
        0.2645, 0.29119255, 0.97787196, 100.,
        0.2645, 0.30649004, 0.96541272, 100.,
        0.2645, 0.30124552, 0.97008732, 100.,
        0.2645, 0.29624499, 0.97414704, 100.,
        0.2645, 0.29148205, 0.97766847, 100.,
        0.2645, 0.28694892, 0.98071974, 100.,
        0.2645, 0.30156558, 0.96981445, 100.,
        0.2645, 0.29654999, 0.97391021, 100.,
        0.2645, 0.29177245, 0.97746315, 100.,
        0.2645, 0.28722523, 0.98054192, 100.,
        0.2645, 0.28289958, 0.98320731, 100.,
    ]
    sim_results_trial_2 = [
        0.2645, 0.42963678, 0.47661181, 100.,
        0.2645, 0.42846471, 0.43938586, 100.,
        -0.2645, 0.42628176, 0.40282965, 100.,
        0.2645, 0.42314468, 0.36732207, 100.,
        -0.2645, 0.41913221, 0.333198, 100.,
        0.2645, 0.42978939, 0.51176048, 100.,
        0.2645, 0.42959394, 0.47427693, 100.,
        -0.2645, 0.4283576, 0.43708106, 100.,
        0.2645, 0.4261132, 0.40057958, 100.,
        -0.2645, 0.422919, 0.36514906, 100.,
        0.2645, 0.42902209, 0.54679323, 100.,
        0.2645, 0.42980788, 0.50942101, 100.,
        -0.2645, 0.42954704, 0.47194318, 100.,
        -0.2645, 0.42824656, 0.43477897, 100.,
        0.2645, 0.42594094, 0.3983337, 100.,
        -0.2645, 0.42735293, 0.58136855, 100.,
        -0.2645, 0.42910149, 0.54447221, 100.,
        0.2645, 0.42982229, 0.50708112, 100.,
        -0.2645, 0.42949608, 0.46961065, 100.,
        -0.2645, 0.42813159, 0.43247968, 100.,
        -0.2645, 0.42482049, 0.61516258, 100.,
        0.2645, 0.42749136, 0.57908829, 100.,
        0.2645, 0.42917687, 0.54214925, 100.,
        -0.2645, 0.42983261, 0.50474093, 100.,
        -0.2645, 0.42944107, 0.46727945, 100.,
    ]
    expected_sim_results_array = (
        sim_results_trial_1 + sim_results_trial_2 + sim_results_trial_1)

    mySystem.run(
        num_trials=nTrials,
        inputs=stim_list_dict,
    )

    np.testing.assert_allclose(
        pytest.helpers.expand_np_ndarray(mySystem.results),
        expected_results_array,
        atol=1e-08,
        verbose=True,
    )

    np.testing.assert_allclose(
        pytest.helpers.expand_np_ndarray(mySystem.simulation_results),
        expected_sim_results_array,
        atol=1e-08,
        verbose=True,
    )
Пример #24
0
class LeabraMechanism(ProcessingMechanism_Base):
    """
    LeabraMechanism(                \
    network=None,                   \
    input_size=1,                   \
    output_size=1,                  \
    hidden_layers=0,                \
    hidden_sizes=None,              \
    training_flag=False,            \
    params=None,                    \
    name=None,                      \
    prefs=None)

    Subclass of `ProcessingMechanism` that is a wrapper for a Leabra network in PsyNeuLink.
    See `Mechanism <Mechanism_Class_Reference>` for additional arguments and attributes.

    Arguments
    ---------

    network : Optional[leabra.Network]
        a network object from the leabra package. If specified, the LeabraMechanism's network becomes **network**,
        and the other arguments that specify the network are ignored (**input_size**, **output_size**,
        **hidden_layers**, **hidden_sizes**).

    input_size : int : default 1
        an integer specifying how many units are in (the size of) the first layer (input) of the leabra network.

    output_size : int : default 1
        an integer specifying how many units are in (the size of) the final layer (output) of the leabra network.

    hidden_layers : int : default 0
        an integer specifying how many hidden layers are in the leabra network.

    hidden_sizes : int or List[int] : default input_size
        if specified, this should be a list of integers, specifying the size of each hidden layer. If **hidden_sizes**
        is a list, the number of integers in **hidden_sizes** should be equal to the number of hidden layers. If not
        specified, hidden layers will default to the same size as the input layer. If hidden_sizes is a single integer,
        then all hidden layers are of that size.

    training_flag : boolean : default None
        a boolean specifying whether the leabra network should be learning. If True, the leabra network will adjust
        its weights using the "leabra" algorithm, based on the training pattern (which is read from its second output
        state). The `training_flag` attribute can be changed after initialization, causing the leabra network to
        start/stop learning. If None, `training_flag` will default to False if **network** argument is not provided.
        If **network** argument is provided and `training_flag` is None, then the existing learning rules of the
        **network** will be preserved.

    quarter_size : int : default 50
        an integer specifying how many times the Leabra network cycles each time it is run. Lower values of
        quarter_size result in shorter execution times, though very low values may cause slight fluctuations in output.
        Lower values of quarter_size also effectively reduce the magnitude of learning weight changes during
        a given trial.

    Attributes
    ----------

    function : LeabraFunction
        the function that wraps and executes the leabra mechanism

    input_size : int : default 1
        an integer specifying how many units are in (the size of) the first layer (input) of the leabra network.

    output_size : int : default 1
        an integer specifying how many units are in (the size of) the final layer (output) of the leabra network.

    hidden_layers : int : default 0
        an integer specifying how many hidden layers are in the leabra network.

    hidden_sizes : int or List[int] : default input_size
        an integer or list of integers, specifying the size of each hidden layer.

    training_flag : boolean
        a boolean specifying whether the leabra network should be learning. If True, the leabra network will adjust
        its weights using the "leabra" algorithm, based on the training pattern (which is read from its second output
        state). The `training_flag` attribute can be changed after initialization, causing the leabra network to
        start/stop learning.

    quarter_size : int : default 50
        an integer specifying how many times the Leabra network cycles each time it is run. Lower values of
        quarter_size result in shorter execution times, though very low values may cause slight fluctuations in output.
        Lower values of quarter_size also effectively reduce the magnitude of learning weight changes during
        a given trial.

    network : leabra.Network
        the leabra.Network object which is executed by the LeabraMechanism. For more info about leabra Networks,
        please see the `leabra package <https://github.com/benureau/leabra>` on Github.

    Returns
    -------
    instance of LeabraMechanism : LeabraMechanism
    """

    componentType = LEABRA_MECHANISM

    is_self_learner = True  # CW 11/27/17: a flag; "True" if the mechanism self-learns. Declared in ProcessingMechanism

    classPreferenceLevel = PreferenceLevel.SUBTYPE
    # These will override those specified in TYPE_DEFAULT_PREFERENCES
    classPreferences = {
        PREFERENCE_SET_NAME: 'TransferCustomClassPreferences',
        REPORT_OUTPUT_PREF: PreferenceEntry(False, PreferenceLevel.INSTANCE)
    }

    # LeabraMechanism parameter and control signal assignments):

    class Parameters(ProcessingMechanism_Base.Parameters):
        """
            Attributes
            ----------

                hidden_layers
                    see `hidden_layers <LeabraMechanism.hidden_layers>`

                    :default value: 0
                    :type: ``int``

                hidden_sizes
                    see `hidden_sizes <LeabraMechanism.hidden_sizes>`

                    :default value: None
                    :type:

                input_size
                    see `input_size <LeabraMechanism.input_size>`

                    :default value: 1
                    :type: ``int``

                network
                    see `network <LeabraMechanism.network>`

                    :default value: None
                    :type:

                output_size
                    see `output_size <LeabraMechanism.output_size>`

                    :default value: 1
                    :type: ``int``

                quarter_size
                    see `quarter_size <LeabraMechanism.quarter_size>`

                    :default value: 50
                    :type: ``int``

                training_flag
                    see `training_flag <LeabraMechanism.training_flag>`

                    :default value: None
                    :type:
        """
        input_size = 1
        output_size = 1
        hidden_layers = 0
        hidden_sizes = None
        quarter_size = 50

        function = Parameter(LeabraFunction, stateful=False, loggable=False)

        network = FunctionParameter(None)
        training_flag = Parameter(False, setter=_training_flag_setter)

    def __init__(self,
                 network=None,
                 input_size=None,
                 output_size=None,
                 hidden_layers=None,
                 hidden_sizes=None,
                 training_flag=None,
                 quarter_size=None,
                 params=None,
                 name=None,
                 prefs=None):

        if network is not None:
            # An explicit network takes precedence: derive all size/shape
            # arguments from it and ignore the caller-supplied ones.
            input_size = len(network.layers[0].units)
            output_size = len(network.layers[-1].units)
            hidden_layers = len(network.layers) - 2
            # BUGFIX: slice must be [1:-1] (exclude only the input layer [0]
            # and the output layer [-1]) so that hidden_sizes has exactly
            # `hidden_layers` entries.  The previous slice [1:-2] also dropped
            # the last hidden layer, leaving hidden_sizes one entry short
            # whenever the network had any hidden layers.
            hidden_sizes = list(map(lambda x: len(x.units),
                                    network.layers))[1:-1]
            quarter_size = network.spec.quarter_size
            training_flag = infer_training_flag_from_network(network)
        else:
            if hidden_sizes is None:
                # Hidden layers default to the same size as the input layer.
                hidden_sizes = input_size

            # don't directly assign defaults to their corresponding variable
            # because that may cause their parameter to be incorrectly assigned
            # _user_specified=True
            network = build_leabra_network(
                input_size
                if input_size is not None else self.class_defaults.input_size,
                output_size if output_size is not None else
                self.class_defaults.output_size,
                hidden_layers if hidden_layers is not None else
                self.class_defaults.hidden_layers,
                hidden_sizes if hidden_sizes is not None else
                self.class_defaults.hidden_sizes,
                training_flag if training_flag is not None else
                self.class_defaults.training_flag,
                quarter_size if quarter_size is not None else
                self.class_defaults.quarter_size,
            )

        # The Mechanism's variable/value shape: one input port of input_size
        # units and one output port of output_size units.
        size = [
            input_size if input_size is not None else
            self.class_defaults.input_size, output_size
            if output_size is not None else self.class_defaults.output_size
        ]

        super().__init__(
            size=size,
            network=network,
            input_size=input_size,
            output_size=output_size,
            hidden_layers=hidden_layers,
            hidden_sizes=hidden_sizes,
            quarter_size=quarter_size,
            training_flag=training_flag,
            params=params,
            name=name,
            prefs=prefs,
        )

    def _execute(
        self,
        variable=None,
        context=None,
        runtime_params=None,
        time_scale=TimeScale.TRIAL,
    ):
        """Execute the wrapped leabra network for one trial.

        A runtime 'training_flag' entry is applied to the mechanism's
        training_flag parameter and removed before delegating, since the
        base-class runtime-param machinery does not handle it.
        """
        if runtime_params:
            if "training_flag" in runtime_params.keys():
                self.parameters.training_flag._set(
                    runtime_params["training_flag"], context)
                del runtime_params["training_flag"]

        return super()._execute(
            variable=variable,
            context=context,
            runtime_params=runtime_params,
        )
Пример #25
0
class BasePreferenceSet(PreferenceSet):
    # DOCUMENT: FOR EACH pref TO BE ACCESSIBLE DIRECTLY AS AN ATTRIBUTE OF AN OBJECT,
    #           MUST IMPLEMENT IT AS PROPERTY (WITH GETTER AND SETTER METHODS) IN FUNCTION MODULE
    """Implement and manage PreferenceSets for Component class hierarchy

    Description:
        Implement the following preferences:
            - verbose (bool): enables/disables reporting of (non-exception) warnings and system function
            - paramValidation (bool):  enables/disables run-time validation of the execute method of a Function object
            - reportOutput ([bool, str]): enables/disables reporting of execution of execute method:
              True prints input/output, 'params' or 'parameters' includes parameter values
            - log (bool): sets LogCondition for a given Component
            - functionRunTimeParams (Modulation): uses run-time params to modulate execute method params
        Implement the following preference levels:
            - SYSTEM: System level default settings (Function.classPreferences)
            - CATEGORY: category-level default settings:
                Mechanism.classPreferences
                Port.classPreferences
                Projection.classPreferences
                Function.classPreferences
            - TYPE: type-level default settings (if one exists for the category, else category-level settings are used):
                MechanismTypes:
                    ControlMechanism.classPreferences
                    ProcessingMechanism.classPreferences
                Port types:
                    InputPort.classPreferences
                    ParameterPort.classPreferences
                    OutputPort.classPreferences
                Projection types:
                    ControlProjection.classPreferences
                    MappingProjection.classPreferences
            - SUBTYPE: subtype-level default settings (if one exists for the type, else type-level settings are used):
                ControlMechanism subtypes:
                    DefaultControlMechanism.classPreferences
                    EVCControlMechanism.classPreferences
                ProcessingMechanism subtypes:
                    DDM.classPreferences
                    Linear.classPreferences
                    SigmoidLayer.classPreferences
                    IntegratorMechanism.classPreferences
            - INSTANCE: returns the setting specified in the PreferenceSetEntry of the specified object itself

    Initialization arguments:
        - owner (Function object): object to which the PreferenceSet belongs;  (default: DefaultProcessingMechanism)
            Note:  this is used to get appropriate default preferences (from class) for instantiation;
                   however, since a PreferenceSet can be assigned to multiple objects, when accessing the preference
                   the owner is set dynamically, to ensure context-relevant PreferenceLevels for returning the setting
        - prefs (dict):  a specification dict, each entry of which must have a:
            key that is a keypath (PREFERENCE_KEYWORD<*>) corresponding to an attribute of the PreferenceSet, from the following set:
                + PREFERENCE_SET_NAME: specifies the name of the PreferenceSet
                + VERBOSE_PREF: print non-exception-related information during execution
                + PARAM_VALIDATION_PREF: validate parameters during execution
                + REPORT_OUTPUT_PREF: report object's output during execution
                + LOG_PREF: record attribute data for the object during execution
                + DELIVERY_PREF: add attribute data to context rpc pipeline for delivery to external applications
                + RUNTIME_PARAM_MODULATION_PREF: modulate parameters using runtime specification (in pathway)
            value that is either a PreferenceSet, valid setting for the preference, or a PreferenceLevel; defaults
        - level (PreferenceLevel): ??
        - name (str): name of PreferenceSet
        - context (value): must be self (to call super's abstract class: PreferenceSet)
        - **kargs (dict): dictionary of arguments, that takes precedence over the individual args above

    Class attributes:
        + defaultPreferencesDict (PreferenceSet): SystemDefaultPreferences
        + baseClass (class): Function

    Class methods:
        Note:
        * All of the setters below use PreferenceSet.set_preference, which validates any preference info passed to it,
            and can take a PreferenceEntry, setting, or PreferenceLevel
        - verbosePref():
            returns setting for verbosePref preference at level specified in verbosePref PreferenceEntry of
             owner's PreferenceSet
        - verbosePref(setting=<value>):
            assigns the value of the setting arg to the verbosePref of the owner's PreferenceSet
        - paramValidationPref():
            returns setting for paramValidationPref preference at level specified in paramValidationPref PreferenceEntry
            of owner's PreferenceSet
        - paramValidationPref(setting=<value>):
            assigns the value of the setting arg to the paramValidationPref of the owner's PreferenceSet
        - reportOutputPref():
            returns setting for reportOutputPref preference at level specified in reportOutputPref PreferenceEntry
            of owner's Preference object
        - reportOutputPref(setting=<value>):
            assigns the value of the setting arg to the reportOutputPref of the owner's PreferenceSet
        - logPref():
            returns setting for log preference at level specified in log PreferenceEntry of owner's Preference object
        - logPref(setting=<value>):
            assigns the value of the setting arg to the logPref of the owner's PreferenceSet
                and, if it contains log entries, it adds them to the owner's log
        - runtimeParamModulationPref():
            returns setting for runtimeParamModulation preference at level specified in
             runtimeParamModulation PreferenceEntry of owner's Preference object
        - runtimeParamModulationPref(setting=<value>):
            assigns the value of the setting arg to the runtimeParamModulationPref of the owner's Preference object
    """

    # Use this as both:
    # - a template for the type of each preference (used for validation)
    # - a default set of preferences (where defaults are not otherwise specified)
    defaultPreferencesDict = {
        PREFERENCE_SET_NAME:
        'BasePreferenceSetDefaults',
        # non-exception warnings/info messages: off by default
        VERBOSE_PREF:
        PreferenceEntry(False, PreferenceLevel.COMPOSITION),
        # run-time parameter validation: on by default
        PARAM_VALIDATION_PREF:
        PreferenceEntry(True, PreferenceLevel.COMPOSITION),
        # reporting of execution output: on by default
        REPORT_OUTPUT_PREF:
        PreferenceEntry(True, PreferenceLevel.COMPOSITION),
        # logging and external delivery default to OFF, resolved at CATEGORY level
        LOG_PREF:
        PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY),
        DELIVERY_PREF:
        PreferenceEntry(LogCondition.OFF, PreferenceLevel.CATEGORY),
        RUNTIME_PARAM_MODULATION_PREF:
        PreferenceEntry(Modulation.MULTIPLY, PreferenceLevel.COMPOSITION)
    }

    # Root class of the hierarchy this PreferenceSet serves; left None here and
    # assigned lazily in __init__ (set to Component there) to avoid a circular
    # import at module load time.
    baseClass = None

    def __init__(self,
                 owner=None,
                 prefs=None,
                 level=PreferenceLevel.COMPOSITION,
                 name=None,
                 **kargs):
        """Instantiate PreferenceSet for owner and/or classPreferences for owner's class

        If owner is a class, instantiate its classPreferences attribute if that does not already exist,
            using its classPreferenceLevel attribute, and the corresponding preference dict in ComponentDefaultPrefDicts
        If owner is an object:
        - if the owner's classPreferences do not yet exist, instantiate it (as described above)
        - use the owner's <class>.classPreferenceLevel to create a base set of preferences from its classPreferences
        - use PreferenceEntries, settings, or level specifications from dict in prefs arg to replace entries in base set
        If owner is omitted:
        - assigns DefaultProcessingMechanism as owner (this is updated if PreferenceSet is assigned to another object)

        :param owner: class or instance the PreferenceSet belongs to (default: DefaultProcessingMechanism)
        :param prefs: specification dict of preference entries (see class docstring)
        :param level: PreferenceLevel at which this set applies
        :param name: name of the PreferenceSet
        :param context:
        :param kargs: keyword dict; entries here take precedence over the individual args above
        """
        # Entries in kargs override the corresponding positional/keyword args.
        # NOTE(review): NameError is caught as well as KeyError, apparently in
        # case a key constant (e.g. PREFS_OWNER) is itself undefined — confirm.
        if kargs:
            try:
                owner = kargs[PREFS_OWNER]
            except (KeyError, NameError):
                pass
            try:
                prefs = kargs[PREFS]
            except (KeyError, NameError):
                pass
            try:
                name = kargs[NAME]
            except (KeyError, NameError):
                pass
            try:
                level = kargs[PREF_LEVEL]
            except (KeyError, NameError):
                pass

        # If baseClass has not been assigned, do so here:
        # (imported lazily to avoid a circular import at module load time)
        if self.baseClass is None:
            from psyneulink.core.components.component import Component
            self.baseClass = Component

        # If owner is not specified, assign DefaultProcessingMechanism_Base as default owner
        if owner is None:
            from psyneulink.core.components.mechanisms.processing.defaultprocessingmechanism import DefaultProcessingMechanism_Base
            DefaultPreferenceSetOwner = DefaultProcessingMechanism_Base(
                name=DEFAULT_PREFERENCE_SET_OWNER)
            owner = DefaultPreferenceSetOwner

        # Get class
        if inspect.isclass(owner):
            owner_class = owner
        else:
            owner_class = owner.__class__

        # If classPreferences have not been instantiated for owner's class, do so here:
        try:
            # If classPreferences are still a dict, they need to be instantiated as a BasePreferenceSet
            if isinstance(owner_class.classPreferences, dict):
                raise AttributeError
        except AttributeError:
            if inspect.isclass(owner):
                # If this is a call to instantiate the classPreferences, no need to keep doing it! (infinite recursion)
                pass
            else:
                # Instantiate the classPreferences
                owner_class.classPreferences = BasePreferenceSet(
                    owner=owner_class,
                    level=owner_class.classPreferenceLevel,
                    prefs=ComponentDefaultPrefDicts[
                        owner_class.classPreferenceLevel],
                    name=name,
                )
        # Instantiate PreferenceSet
        super().__init__(
            owner=owner,
            level=owner_class.classPreferenceLevel,
            prefs=prefs,
            name=name,
        )
        # Remember the level requested by the caller (super() was initialized
        # with the class's own classPreferenceLevel above).
        self._level = level

    @property
    def verbosePref(self):
        """Owner's verbosePref, resolved at the level stored in its PreferenceEntry.

        When the owner sits below the recorded preference level,
        get_pref_setting_for_level climbs base classes to find the setting.
        """
        entry_level = self._verbose_pref.level
        result = self.get_pref_setting_for_level(VERBOSE_PREF, entry_level)
        return result[0]

    @verbosePref.setter
    def verbosePref(self, setting):
        """Assign setting to owner's verbosePref."""
        self.set_preference(pref_ivar_name=VERBOSE_PREF,
                            candidate_info=setting)

    @property
    def paramValidationPref(self):
        """Owner's param_validationPref, resolved at the level stored in its PreferenceEntry.

        When the owner sits below the recorded preference level,
        get_pref_setting_for_level climbs base classes to find the setting.
        """
        entry_level = self._param_validation_pref.level
        result = self.get_pref_setting_for_level(PARAM_VALIDATION_PREF,
                                                 entry_level)
        return result[0]

    @paramValidationPref.setter
    def paramValidationPref(self, setting):
        """Assign setting to owner's param_validationPref
        :param setting:
        :return:
        """
        # Pass arguments by keyword for consistency with the other preference
        # setters in this class (verbosePref, reportOutputPref, logPref, ...).
        self.set_preference(candidate_info=setting,
                            pref_ivar_name=PARAM_VALIDATION_PREF)

    @property
    def reportOutputPref(self):
        """Owner's reportOutputPref, resolved at the level stored in its PreferenceEntry.

        When the owner sits below the recorded preference level,
        get_pref_setting_for_level climbs base classes to find the setting.
        """
        entry_level = self._report_output_pref.level
        result = self.get_pref_setting_for_level(REPORT_OUTPUT_PREF,
                                                 entry_level)
        return result[0]

    @reportOutputPref.setter
    def reportOutputPref(self, setting):
        """Assign setting to owner's reportOutputPref."""
        self.set_preference(pref_ivar_name=REPORT_OUTPUT_PREF,
                            candidate_info=setting)

    @property
    def logPref(self):
        """Owner's logPref, resolved at the level stored in its PreferenceEntry.

        When the owner sits below the recorded preference level,
        get_pref_setting_for_level climbs base classes to find the setting.
        """
        entry_level = self._log_pref.level
        result = self.get_pref_setting_for_level(LOG_PREF, entry_level)
        return result[0]

    @property
    def _deliveryPref(self):
        """Owner's _deliveryPref, resolved at the level stored in its PreferenceEntry.

        When the owner sits below the recorded preference level,
        get_pref_setting_for_level climbs base classes to find the setting.
        """
        entry_level = self._delivery_pref.level
        result = self.get_pref_setting_for_level(DELIVERY_PREF, entry_level)
        return result[0]

    # # VERSION THAT USES OWNER'S logPref TO LIST ENTRIES TO BE RECORDED
    # @logPref.setter
    # def logPref(self, setting):
    #     """Assign setting to owner's logPref and, if it has log entries, add them to owner's log
    #     :param setting:
    #     :return:
    #     """
    #
    #     entries, level = self.set_preference(candidate_info=setting, pref_ivar_name=LOG_PREF, [str, list])
    #
    #     if entries:
    #         # Add entries to owner's log
    #         from Globals.Log import Log
    #
    #         try:
    #             self.owner.log.add_entries(entries=entries)
    #         except AttributeError:
    #             self.owner.log = Log(owner=self, entries=entries)

    # VERSION THAT USES OWNER'S logPref AS RECORDING SWITCH
    @logPref.setter
    def logPref(self, setting):
        """Assign setting to owner's logPref (used as a recording on/off switch)."""
        self.set_preference(pref_ivar_name=LOG_PREF, candidate_info=setting)

    @property
    def runtimeParamModulationPref(self):
        """Owner's runtimeParamModulationPref, resolved at the level stored in
        its PreferenceEntry.
        """
        entry_level = self._runtime_param_modulation_pref.level
        result = self.get_pref_setting_for_level(RUNTIME_PARAM_MODULATION_PREF,
                                                 entry_level)
        return result[0]

    @runtimeParamModulationPref.setter
    def runtimeParamModulationPref(self, setting):
        """Assign setting to owner's runtimeParamModulationPref."""
        self.set_preference(pref_ivar_name=RUNTIME_PARAM_MODULATION_PREF,
                            candidate_info=setting)