Example #1
    def _validate_noise(self, noise):
        # Noise must be a scalar, list, array or Distribution Function

        if isinstance(noise, DistributionFunction):
            noise = noise.execute

        if isinstance(noise, (np.ndarray, list)):
            if len(noise) == 1:
                pass
            # Variable is a list/array
            elif (not iscompatible(np.atleast_2d(noise), self.defaults.variable)
                  and not iscompatible(np.atleast_1d(noise), self.defaults.variable) and len(noise) > 1):
                raise FunctionError(f"Noise parameter ({noise})  for '{self.name}' does not match default variable "
                                    f"({self.defaults.variable}); it must be specified as a float, a function, "
                                    f"or an array of the appropriate shape "
                                    f"({np.shape(np.array(self.defaults.variable))}).",
                    component=self)
            else:
                for i in range(len(noise)):
                    if isinstance(noise[i], DistributionFunction):
                        noise[i] = noise[i].execute
                    if (not np.isscalar(noise[i]) and not callable(noise[i])
                            and not iscompatible(np.atleast_2d(noise[i]), self.defaults.variable[i])
                            and not iscompatible(np.atleast_1d(noise[i]), self.defaults.variable[i])):
                        raise FunctionError(f"The element '{noise[i]}' specified in 'noise' for {self.name} "
                                             f"is not valid; noise must be list or array must be floats or functions.")
Example #2
    def _validate_noise(self, noise):
        # Noise is a list or array
        if isinstance(noise, (np.ndarray, list)):
            if len(noise) == 1:
                pass
            # Variable is a list/array
            elif (not iscompatible(np.atleast_2d(noise),
                                   self.defaults.variable)
                  and not iscompatible(np.atleast_1d(noise),
                                       self.defaults.variable)
                  and len(noise) > 1):
                raise FunctionError(
                    "Noise parameter ({}) does not match default variable ({}). Noise parameter of {} "
                    "must be specified as a float, a function, or an array of the appropriate shape ({})."
                    .format(noise, self.defaults.variable, self.name,
                            np.shape(np.array(self.defaults.variable))))
            else:
                for i in range(len(noise)):
                    if isinstance(noise[i], DistributionFunction):
                        noise[i] = noise[i].execute
                    # if not isinstance(noise[i], (float, int)) and not callable(noise[i]):
                    if not np.isscalar(noise[i]) and not callable(noise[i]):
                        raise FunctionError(
                            "The elements of a noise list or array must be scalars or functions. "
                            "{} is not a valid noise element for {}".format(
                                noise[i], self.name))

        # Otherwise, must be a float, int or function
        elif not isinstance(noise, (float, int)) and not callable(noise):
            raise FunctionError(
                "Noise parameter ({}) for {} must be a float, function, or array/list of these."
                .format(noise, self.name))
Example #3
    def _validate_rate(self, rate):
        # FIX: CAN WE JUST GET RID OF THIS?
        # kmantel: this duplicates much code in _validate_params above, but that calls _instantiate_defaults
        # which I don't think is the right thing to do here, but if you don't call it in _validate_params
        # then a lot of things don't get instantiated properly
        if rate is not None:
            if isinstance(rate, list):
                rate = np.asarray(rate)

            rate_type_msg = 'The rate parameter of {0} must be a number or an array/list of at most 1d (you gave: {1})'
            if isinstance(rate, np.ndarray):
                # kmantel: current test_gating test depends on 2d rate
                #   this should be looked at but for now this restriction is removed
                # if rate.ndim > 1:
                #     raise FunctionError(rate_type_msg.format(self.name, rate))
                pass
            elif not isinstance(rate, numbers.Number):
                raise FunctionError(rate_type_msg.format(self.name, rate))

            if isinstance(rate, np.ndarray) and not iscompatible(rate, self.defaults.variable):
                if len(rate) != 1 and len(rate) != np.array(self.defaults.variable).size:
                    if self._variable_shape_flexibility is DefaultsFlexibility.FLEXIBLE:
                        self.defaults.variable = np.zeros_like(np.array(rate))
                        if self.verbosePref:
                            warnings.warn(f"The length ({len(rate)}) of the array specified for the rate parameter "
                                          f"({rate}) of {self.name} must match the length "
                                          f"({np.array(self.defaults.variable).size}) of the default input "
                                          f"({self.defaults.variable}); the default input has been updated to match.")
                        self._instantiate_value()
                        self._variable_shape_flexibility = DefaultsFlexibility.INCREASE_DIMENSION
                    else:
                        raise FunctionError(f"The length of the array specified for the rate parameter of "
                                            f"{len(rate)} ({self.name}) must match the length of the default input "
                                            f"({np.array(self.defaults.variable).size}).")
Example #4
    def _validate_params(self, request_set, target_set=None, context=None):

        # Handle list or array for rate specification
        if RATE in request_set:
            rate = request_set[RATE]

            if isinstance(rate, (list, np.ndarray)) and not iscompatible(
                    rate, self.defaults.variable):
                if len(rate) != 1 and len(rate) != np.array(
                        self.defaults.variable).size:
                    # If the variable was not specified, then reformat it to match rate specification
                    #    and assign class_defaults.variable accordingly
                    # Note: this situation can arise when the rate is parametrized (e.g., as an array) in the
                    #       StatefulFunction's constructor, where that is used as a specification for a function parameter
                    #       (e.g., for an IntegratorMechanism), whereas the input is specified as part of the
                    #       object to which the function parameter belongs (e.g., the IntegratorMechanism); in that
                    #       case, the StatefulFunction gets instantiated using its class_defaults.variable ([[0]]) before
                    #       the object itself, thus does not see the array specification for the input.
                    if self._default_variable_flexibility is DefaultsFlexibility.FLEXIBLE:
                        self._instantiate_defaults(variable=np.zeros_like(
                            np.array(rate)),
                                                   context=context)
                        if self.verbosePref:
                            warnings.warn(
                                "The length ({}) of the array specified for the rate parameter ({}) of {} "
                                "must match the length ({}) of the default input ({}); "
                                "the default input has been updated to match."
                                .format(len(rate), rate, self.name,
                                        np.array(self.defaults.variable).size,
                                        self.defaults.variable))
                    else:
                        raise FunctionError(
                            "The length of the array specified for the rate parameter of {} ({}) "
                            "must match the length of the default input ({}).".
                            format(
                                self.name,
                                # rate,
                                len(rate),
                                np.array(self.defaults.variable).size,
                                # self.defaults.variable,
                            ))
                        # OLD:
                        # self.paramClassDefaults[RATE] = np.zeros_like(np.array(rate))

                        # KAM changed 5/15 b/c paramClassDefaults were being updated and *requiring* future integrator functions
                        # to have a rate parameter of type ndarray/list

        super()._validate_params(request_set=request_set,
                                 target_set=target_set,
                                 context=context)

        if NOISE in target_set:
            noise = target_set[NOISE]
            if isinstance(noise, DistributionFunction):
                noise.owner = self
                target_set[NOISE] = noise.execute
            self._validate_noise(target_set[NOISE])
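
The flow above can be summarized as: a rate array that disagrees with the default variable's size either reshapes the default (when the variable is still flexible) or raises. A hedged, standalone sketch of that decision (all names illustrative):

import numpy as np

def reconcile_rate(rate, default_variable, flexible):
    rate = np.asarray(rate)
    if rate.size in (1, np.asarray(default_variable).size):
        return default_variable          # shapes already compatible
    if flexible:
        # Variable was never specified explicitly: rebuild it to match the rate
        return np.zeros_like(rate)
    raise ValueError(f"rate length ({len(rate)}) must match default input size "
                     f"({np.asarray(default_variable).size})")

print(reconcile_rate([0.1, 0.2, 0.3], [[0.0]], flexible=True))   # -> [0. 0. 0.]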
Example #5
    def _instantiate_receiver(self, context=None):
        """Validate that receiver has been assigned and is compatible with the output of function

        Set learning_enabled to value of receiver if it was not otherwise specified in the constructor

        Notes:
        * _validate_params verifies that receiver is a parameterPort for the matrix parameter of a MappingProjection.
        * super()._instantiate_receiver verifies that the projection has not already been assigned to the receiver.

        """

        super()._instantiate_receiver(context=context)

        # Insure that the learning_signal is compatible with the receiver's weight matrix
        if not iscompatible(self.defaults.value,
                            self.receiver.defaults.variable):
            raise LearningProjectionError(
                "The learning_signal of {} ({}) is not compatible with the matrix of "
                "the MappingProjection ({}) to which it is being assigned ({})"
                .format(self.name, self.defaults.value,
                        self.receiver.defaults.value,
                        self.receiver.owner.name))

        # Insure that learning_signal has the same shape as the receiver's weight matrix
        try:
            receiver_weight_matrix_shape = np.array(
                self.receiver.defaults.value).shape
        except TypeError:
            receiver_weight_matrix_shape = 1
        try:
            learning_signal_shape = np.array(self.defaults.value).shape
        except TypeError:
            learning_signal_shape = 1

        # FIX: SHOULD TEST WHETHER IT CAN BE USED, NOT WHETHER IT IS THE SAME SHAPE
        learning_mechanism = self.sender.owner
        learned_projection = self.receiver.owner

        # Set learning_enabled to value of its LearningMechanism sender if it was not specified in the constructor
        if self.learning_enabled is None:
            self.learning_enabled = self.parameters.learning_enabled.default_value = learning_mechanism.learning_enabled

        learned_projection.learning_mechanism = learning_mechanism
        learned_projection.has_learning_projection = self
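
A small sketch of the compatibility requirement enforced above: the learning signal must have the same shape as the weight matrix of the projection it modifies (per the FIX note, usability rather than strict shape equality is the real criterion). Names are illustrative:

import numpy as np

def check_learning_signal(learning_signal, weight_matrix):
    signal_shape = np.asarray(learning_signal).shape
    matrix_shape = np.asarray(weight_matrix).shape
    if signal_shape != matrix_shape:
        raise ValueError(f"learning signal shape {signal_shape} is not "
                         f"compatible with weight matrix shape {matrix_shape}")

check_learning_signal(np.zeros((3, 2)), np.zeros((3, 2)))   # OK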
Example #6
    def _validate_params(self, request_set, target_set=None, context=None):

        # Handle list or array for rate specification
        if RATE in request_set:
            rate = request_set[RATE]

            if isinstance(rate, (list, np.ndarray)) and not iscompatible(rate, self.defaults.variable):
                if len(rate) != 1 and len(rate) != np.array(self.defaults.variable).size:
                    # If the variable was not specified, then reformat it to match rate specification
                    #    and assign class_defaults.variable accordingly
                    # Note: this situation can arise when the rate is parametrized (e.g., as an array) in the
            #       StatefulFunction's constructor, where that is used as a specification for a function parameter
                    #       (e.g., for an IntegratorMechanism), whereas the input is specified as part of the
                    #       object to which the function parameter belongs (e.g., the IntegratorMechanism); in that
                    #       case, the StatefulFunction gets instantiated using its class_defaults.variable ([[0]])
                    #       before the object itself, thus does not see the array specification for the input.
                    if self._variable_shape_flexibility is DefaultsFlexibility.FLEXIBLE:
                        self._instantiate_defaults(variable=np.zeros_like(np.array(rate)), context=context)
                        if self.verbosePref:
                            warnings.warn(f"The length ({len(rate)}) of the array specified for "
                                          f"the rate parameter ({rate}) of {self.name} must match the length "
                                          f"({np.array(self.defaults.variable).size}) of the default input "
                                          f"({self.defaults.variable}); the default input has been updated to match.")
                    else:
                        raise FunctionError(f"The length of the array specified for the rate parameter of {self.name}"
                                            f"({len(rate)}) must match the length of the default input "
                                            f"({np.array(self.defaults.variable).size}).")

        super()._validate_params(request_set=request_set,
                                 target_set=target_set,
                                 context=context)

        if NOISE in target_set:
            noise = target_set[NOISE]
            if isinstance(noise, DistributionFunction):
                noise.owner = self
                target_set[NOISE] = noise.execute
            self._validate_noise(target_set[NOISE])
Example #7
    def validate_setting(self, candidate_setting, reference_setting,
                         pref_ivar_name):
        """Validate candidate_setting by checking against reference_setting and, if a log_entry, its type

        :param candidate_setting:
        :param reference_setting:
        :return:
        """
        # from Globals.Preferences.BasePreferenceSet import LOG_PREF
        # if pref_ivar_name is LOG_PREF:
        #     self.validate_log(candidate_setting, self)

        setting_OK = iscompatible(candidate_setting, reference_setting,
                                  **{kwCompatibilityType: Enum})
        # setting_OK = iscompatible(candidate_setting, reference_setting)

        # if not setting_OK and (isinstance(candidate_setting, Enum) or isinstance(reference_setting, Enum)):
        #     if isinstance(candidate_setting, Enum):
        #         raise PreferenceSetError("'{0}' is not a valid value for setting of {1} in {2} of {3}".
        #                                  format(candidate_setting, pref_ivar_name, self.name, owner_name))
        #     else if

        return setting_OK
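
A sketch of what the Enum-aware type check amounts to (a stand-in for iscompatible with kwCompatibilityType: Enum; all names illustrative):

from enum import Enum

class Verbosity(Enum):
    TERSE = 0
    FULL = 1

def setting_ok(candidate, reference):
    # Same concrete type; for Enum settings this means the candidate
    # must be a member of the same Enum class as the reference
    return isinstance(candidate, type(reference))

print(setting_ok(Verbosity.FULL, Verbosity.TERSE))   # True
print(setting_ok(1, Verbosity.TERSE))                # False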
Example #8
    def _instantiate_receiver(self, context=None):
        """Validate that receiver has been assigned and is compatible with the output of function

        Set learning_enabled to value of receiver if it was not otherwise specified in the constructor

        Notes:
        * _validate_params verifies that receiver is a parameterPort for the matrix parameter of a MappingProjection.
        * super()._instantiate_receiver verifies that the projection has not already been assigned to the receiver.

        """

        super()._instantiate_receiver(context=context)

        # Insure that the learning_signal is compatible with the receiver's weight matrix
        if not iscompatible(self.defaults.value,
                            self.receiver.defaults.variable):
            raise LearningProjectionError(
                "The learning_signal of {} ({}) is not compatible with the matrix of "
                "the MappingProjection ({}) to which it is being assigned ({})"
                .format(self.name, self.defaults.value,
                        self.receiver.defaults.value,
                        self.receiver.owner.name))

        # Insure that learning_signal has the same shape as the receiver's weight matrix
        try:
            receiver_weight_matrix_shape = np.array(
                self.receiver.defaults.value).shape
        except TypeError:
            receiver_weight_matrix_shape = 1
        try:
            learning_signal_shape = np.array(self.defaults.value).shape
        except TypeError:
            learning_signal_shape = 1

        # FIX: SHOULD TEST WHETHER IT CAN BE USED, NOT WHETHER IT IS THE SAME SHAPE
        learning_mechanism = self.sender.owner
        learned_projection = self.receiver.owner

        # Set learning_enabled to value of its LearningMechanism sender if it was not specified in the constructor
        if self.learning_enabled is None:
            self.learning_enabled = self.parameters.learning_enabled.default_value = learning_mechanism.learning_enabled

        # Check if learning_mechanism receives a projection from an ObjectiveMechanism;
        #    if it does, assign it to the objective_mechanism attribute for the projection being learned

        # FIX: REMOVE WHEN System IS FULLY DEPRECATED
        # MODIFIED 7/15/19 OLD: JDC RESTORED TO ALLOW SYSTEM TO WORK (DOESN'T SEEM TO TRASH BP)
        # KAM Commented out next 8 lines on 6/24/19 to get past bug in multilayer backprop on Composition
        try:
            candidate_objective_mech = learning_mechanism.input_ports[
                ERROR_SIGNAL].path_afferents[0].sender.owner
            if isinstance(candidate_objective_mech, ObjectiveMechanism
                          ) and candidate_objective_mech._role is LEARNING:
                learned_projection.objective_mechanism = candidate_objective_mech
        except TypeError:
            # learning_mechanism does not receive from an ObjectiveMechanism
            #    (e.g., AutoAssociativeLearningMechanism, which receives straight from a ProcessingMechanism)
            pass
        # MODIFIED 7/15/19 END

        learned_projection.learning_mechanism = learning_mechanism
        learned_projection.has_learning_projection = self
Example #9
    def _validate_params(self, request_set, target_set=None, context=None):
        """If sample and target values are specified, validate that they are compatible
        """

        if INPUT_PORTS in request_set and request_set[INPUT_PORTS] is not None:
            input_ports = request_set[INPUT_PORTS]

            # Validate that there are exactly two input_ports (for sample and target)
            num_input_ports = len(input_ports)
            if num_input_ports != 2:
                raise ComparatorMechanismError(f"{INPUT_PORTS} arg is specified for {self.__class__.__name__} "
                                               f"({len(input_ports)}), so it must have exactly 2 items, "
                                               f"one each for {SAMPLE} and {TARGET}.")

            # Validate that input_ports are specified as dicts
            if not all(isinstance(input_port,dict) for input_port in input_ports):
                raise ComparatorMechanismError("PROGRAM ERROR: all items in input_port args must be converted to dicts"
                                               " by calling Port._parse_port_spec() before calling super().__init__")

            # Validate length of variable for sample = target
            if VARIABLE in input_ports[0]:
                # input_ports arg specified in standard port specification dict format
                lengths = [len(input_port[VARIABLE]) if input_port[VARIABLE] is not None else 0
                           for input_port in input_ports]
            else:
                # input_ports arg specified in {<Port_Name>:<PORT SPECIFICATION DICT>} format
                lengths = [len(list(input_port_dict.values())[0][VARIABLE]) for input_port_dict in input_ports]

            if lengths[0] != lengths[1]:
                raise ComparatorMechanismError(f"Length of value specified for {SAMPLE} InputPort "
                                               f"of {self.__class__.__name__} ({lengths[0]}) must be "
                                               f"same as length of value specified for {TARGET} ({lengths[1]}).")

        elif SAMPLE in request_set and TARGET in request_set:

            sample = request_set[SAMPLE]
            if isinstance(sample, InputPort):
                sample_value = sample.value
            elif isinstance(sample, Mechanism):
                sample_value = sample.input_value[0]
            elif is_value_spec(sample):
                sample_value = sample
            else:
                sample_value = None

            target = request_set[TARGET]
            if isinstance(target, InputPort):
                target_value = target.value
            elif isinstance(target, Mechanism):
                target_value = target.input_value[0]
            elif is_value_spec(target):
                target_value = target
            else:
                target_value = None

            if sample is not None and target is not None:
                if not iscompatible(sample, target, **{kwCompatibilityLength: True,
                                                       kwCompatibilityNumeric: True}):
                    raise ComparatorMechanismError(f"The length of the sample ({len(sample)}) "
                                                   f"must be the same as for the target ({len(target)})"
                                                   f"for {self.__class__.__name__} {self.name}.")

        super()._validate_params(request_set=request_set,
                                 target_set=target_set,
                                 context=context)
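
A compact sketch of the sample/target constraint above: both values must be numeric and the same length before the mechanism can compare them (hypothetical names, plain numpy):

import numpy as np

def validate_sample_target(sample, target):
    sample, target = np.atleast_1d(sample), np.atleast_1d(target)
    if len(sample) != len(target):
        raise ValueError(f"The length of the sample ({len(sample)}) must be "
                         f"the same as for the target ({len(target)}).")

validate_sample_target([1.0, 2.0], [0.0, 1.0])   # OK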
Example #10
    def __init__(self,
                 custom_function=None,
                 default_variable=None,
                 params=None,
                 owner=None,
                 prefs: tc.optional(is_pref_set) = None,
                 **kwargs):

        def get_cust_fct_args(custom_function):
            """Get args of custom_function
            Return:
                - value of first arg (to be used as default_variable for UDF)
                - dict with all others (to be assigned as params of UDF)
                - dict with default values (from function definition, else set to None)
            """
            try:
                arg_names = custom_function.__code__.co_varnames
            except AttributeError:
                raise FunctionError("Can't get __code__ for custom_function")
            args = {}
            defaults = {}
            for arg_name, arg in signature(custom_function).parameters.items():

                # MODIFIED 3/6/19 NEW: [JDC]
                # Custom function specified owner as arg
                if arg_name in {SELF, OWNER, CONTEXT}:
                    # Flag for inclusion in call to function
                    if arg_name == SELF:
                        self.self_arg = True
                    elif arg_name == OWNER:
                        self.owner_arg = True
                    else:
                        self.context_arg = True
                    # Skip rest, as these don't need to be params
                    continue
                # MODIFIED 3/6/19 END

                # Use definition from the function as default;
                #    this allows UDF to assign a value for this instance (including a MODULATORY spec)
                #    while assigning an actual value to current/defaults
                if arg.default is _empty:
                    defaults[arg_name] = None

                else:
                    defaults[arg_name] = arg.default

                # If arg is specified in the constructor for the UDF, assign that as its value
                if arg_name in kwargs:
                    args[arg_name] = kwargs[arg_name]
                # Otherwise, use the default value from the definition of the function
                else:
                    args[arg_name] = defaults[arg_name]

            # Assign default value of first arg as variable and remove from dict
            variable = args[arg_names[0]]
            if variable is _empty:
                variable = None
            del args[arg_names[0]]

            return variable, args, defaults

        self.self_arg = False
        self.owner_arg = False
        self.context_arg = False

        # Get variable and names of any other args for custom_function and assign to cust_fct_params
        if params is not None and CUSTOM_FUNCTION in params:
            custom_function = params[CUSTOM_FUNCTION]
        try:
            cust_fct_variable, self.cust_fct_params, defaults = get_cust_fct_args(custom_function)
        except FunctionError:
            raise FunctionError("Assignment of a built-in function or method ({}) to a {} is not supported".
                                format(custom_function, self.__class__.__name__))

        # If params is specified as arg in custom function's definition, move it to params in UDF's constructor
        if PARAMS in self.cust_fct_params:
            if self.cust_fct_params[PARAMS]:
                if params:
                    params.update(self.cust_fct_params)
                else:
                    params = self.cust_fct_params[PARAMS]
            del self.cust_fct_params[PARAMS]

        # If context is specified as arg in custom function's definition, delete it
        if CONTEXT in self.cust_fct_params:
            if self.cust_fct_params[CONTEXT]:
                context = self.cust_fct_params[CONTEXT]
            del self.cust_fct_params[CONTEXT]

        # Assign variable to default_variable if default_variable was not specified
        if default_variable is None:
            default_variable = cust_fct_variable
        elif cust_fct_variable and not iscompatible(default_variable, cust_fct_variable):
            owner_name = ' ({})'.format(owner.name) if owner else ''
            cust_fct_name = repr(custom_function.__name__)
            raise FunctionError("Value passed as \'default_variable\' for {} {} ({}) conflicts with specification of "
                                "first argument in constructor for {} itself ({}). "
                                "Try modifying specification of \'default_variable\' "
                                "for object to which {} is being assigned{}, and/or insuring that "
                                "the first argument of {} is at least a 2d array".
                                format(self.__class__.__name__, cust_fct_name, default_variable,
                                       cust_fct_name, cust_fct_variable, cust_fct_name, owner_name, cust_fct_name))

        super().__init__(
            default_variable=default_variable,
            custom_function=custom_function,
            params=params,
            owner=owner,
            prefs=prefs,
            **self.cust_fct_params
        )
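
The heart of get_cust_fct_args is standard inspect.signature introspection. A self-contained sketch of that pattern, assuming the first parameter serves as the default variable and the rest become params (all names here are illustrative):

from inspect import Parameter, signature

def split_args(fct, **overrides):
    params = list(signature(fct).parameters.items())
    first_name = params[0][0]            # first arg -> default variable
    args = {}
    for name, param in params[1:]:
        default = None if param.default is Parameter.empty else param.default
        args[name] = overrides.get(name, default)   # constructor kwargs win
    return first_name, args

def my_fct(variable, gain=2.0, offset=0.0):
    return gain * variable + offset

print(split_args(my_fct, gain=5.0))
# -> ('variable', {'gain': 5.0, 'offset': 0.0})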
Example #11
    def _validate_params(self, request_set, target_set=None, context=None):
        """If sample and target values are specified, validate that they are compatible
        """

        if INPUT_STATES in request_set and request_set[
                INPUT_STATES] is not None:
            input_states = request_set[INPUT_STATES]

            # Validate that there are exactly two input_states (for sample and target)
            num_input_states = len(input_states)
            if num_input_states != 2:
                raise ComparatorMechanismError(
                    "{} arg is specified for {} ({}), so it must have exactly 2 items, "
                    "one each for {} and {}".format(INPUT_STATES,
                                                    self.__class__.__name__,
                                                    len(input_states), SAMPLE,
                                                    TARGET))

            # Validate that input_states are specified as dicts
            if not all(
                    isinstance(input_state, dict)
                    for input_state in input_states):
                raise ComparatorMechanismError(
                    "PROGRAM ERROR: all items in input_state args must be converted to dicts"
                    " by calling State._parse_state_spec() before calling super().__init__"
                )

            # Validate length of variable for sample = target
            if VARIABLE in input_states[0]:
                # input_states arg specified in standard state specification dict format
                lengths = [
                    len(input_state[VARIABLE]) for input_state in input_states
                ]
            else:
                # input_states arg specified in {<STATE_NAME>:<STATE SPECIFICATION DICT>} format
                lengths = [
                    len(list(input_state_dict.values())[0][VARIABLE])
                    for input_state_dict in input_states
                ]

            if lengths[0] != lengths[1]:
                raise ComparatorMechanismError(
                    "Length of value specified for {} InputState of {} ({}) must be "
                    "same as length of value specified for {} ({})".format(
                        SAMPLE, self.__class__.__name__, lengths[0], TARGET,
                        lengths[1]))

        elif SAMPLE in request_set and TARGET in request_set:

            sample = request_set[SAMPLE]
            if isinstance(sample, InputState):
                sample_value = sample.value
            elif isinstance(sample, Mechanism):
                sample_value = sample.input_value[0]
            elif is_value_spec(sample):
                sample_value = sample
            else:
                sample_value = None

            target = request_set[TARGET]
            if isinstance(target, InputState):
                target_value = target.value
            elif isinstance(target, Mechanism):
                target_value = target.input_value[0]
            elif is_value_spec(target):
                target_value = target
            else:
                target_value = None

            if sample is not None and target is not None:
                if not iscompatible(
                        sample, target, **{
                            kwCompatibilityLength: True,
                            kwCompatibilityNumeric: True
                        }):
                    raise ComparatorMechanismError(
                        "The length of the sample ({}) must be the same as for the target ({})"
                        "for {} {}".format(len(sample), len(target),
                                           self.__class__.__name__, self.name))

        super()._validate_params(request_set=request_set,
                                 target_set=target_set,
                                 context=context)
Example #12
    def _instantiate_receiver(self, context=None):
        """Validate that receiver has been assigned and is compatible with the output of function

        Notes:
        * _validate_params verifies that receiver is a parameterState for the matrix parameter of a MappingProjection.
        * super()._instantiate_receiver verifies that the projection has not already been assigned to the receiver.

        """

        super()._instantiate_receiver(context=context)

        # Insure that the learning_signal is compatible with the receiver's weight matrix
        if not iscompatible(self.defaults.value,
                            self.receiver.defaults.variable):
            raise LearningProjectionError(
                "The learning_signal of {} ({}) is not compatible with the matrix of "
                "the MappingProjection ({}) to which it is being assigned ({})"
                .format(self.name, self.defaults.value,
                        self.receiver.defaults.value,
                        self.receiver.owner.name))

        # Insure that learning_signal has the same shape as the receiver's weight matrix
        try:
            receiver_weight_matrix_shape = np.array(
                self.receiver.defaults.value).shape
        except TypeError:
            receiver_weight_matrix_shape = 1
        try:
            learning_signal_shape = np.array(self.defaults.value).shape
        except TypeError:
            learning_signal_shape = 1

        # If MappingProjection to which the receiver belongs has been assigned an Identity function (for efficiency),
        #    then it does not have an actual matrix;  so re-assign its defaults.value Identity matrix,
        #    which also forces reassignment of its _original_function (in _mapping_projection_matrix_setter)
        if isinstance(self.receiver.owner.function, Identity):
            self.receiver.owner.matrix = self.receiver.defaults.value

        # FIX: SHOULD TEST WHETHER IT CAN BE USED, NOT WHETHER IT IS THE SAME SHAPE
        # # MODIFIED 3/8/17 OLD:
        # if receiver_weight_matrix_shape != learning_signal_shape:
        #     raise ProjectionError("Shape ({}) of learing_signal matrix for {} from {}"
        #                           " must match shape of the weight matrix ({}) for the receiver {}".
        #                           format(learning_signal_shape,
        #                                  self.name,
        #                                  self.sender.name,
        #                                  receiver_weight_matrix_shape,
        #                                  self.receiver.owner.name))
        # MODIFIED 3/8/17 END

        learning_mechanism = self.sender.owner
        learned_projection = self.receiver.owner

        # Check if learning_mechanism receives a projection from an ObjectiveMechanism;
        #    if it does, assign it to the objective_mechanism attribute for the projection being learned
        try:
            candidate_objective_mech = learning_mechanism.input_states[
                ERROR_SIGNAL].path_afferents[0].sender.owner
            if isinstance(candidate_objective_mech, ObjectiveMechanism
                          ) and candidate_objective_mech._role is LEARNING:
                learned_projection.objective_mechanism = candidate_objective_mech
        except TypeError:
            # learning_mechanism does not receive from an ObjectiveMechanism
            #    (e.g., AutoAssociativeLearningMechanism, which receives straight from a ProcessingMechanism)
            pass
        learned_projection.learning_mechanism = learning_mechanism
        learned_projection.has_learning_projection = self