import numbers
import numpy as np

# NOTE: get_matrix and HOLLOW_MATRIX are assumed to be defined in the surrounding
# PsyNeuLink module; HOLLOW_MATRIX denotes a ones matrix with a zeroed diagonal.
def get_hetero_matrix(raw_hetero, size):
    """Return a size x size hetero matrix (zero diagonal) from a scalar, single-item vector, or 2d specification."""
    # Scalar: scale a hollow (zero-diagonal) matrix of ones by the scalar
    if isinstance(raw_hetero, numbers.Number):
        return get_matrix(HOLLOW_MATRIX, size, size) * raw_hetero
    # 1d vector: only a single-item vector is a valid specification
    elif ((isinstance(raw_hetero, np.ndarray) and raw_hetero.ndim == 1) or
          (isinstance(raw_hetero, list) and np.array(raw_hetero).ndim == 1)):
        if len(raw_hetero) != 1:
            return None
        return get_matrix(HOLLOW_MATRIX, size, size) * raw_hetero[0]
    # 2d specification: use it as given, but zero the diagonal
    elif (isinstance(raw_hetero, np.matrix) or
          (isinstance(raw_hetero, np.ndarray) and raw_hetero.ndim == 2) or
          (isinstance(raw_hetero, list) and np.array(raw_hetero).ndim == 2)):
        # Convert to an array first so that list specifications also work with fill_diagonal
        hetero = np.array(raw_hetero)
        np.fill_diagonal(hetero, 0)
        return hetero
    else:
        return None
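
# A minimal usage sketch with hypothetical values, assuming get_matrix and HOLLOW_MATRIX
# behave as noted above (HOLLOW_MATRIX expanding to a ones matrix with a zeroed diagonal):
if __name__ == '__main__':
    size = 3
    hollow = np.ones((size, size)) - np.identity(size)  # assumed expansion of HOLLOW_MATRIX

    np.testing.assert_allclose(get_hetero_matrix(0.5, size), 0.5 * hollow)              # scalar spec
    np.testing.assert_allclose(get_hetero_matrix([0.5], size), 0.5 * hollow)            # single-item vector
    np.testing.assert_allclose(get_hetero_matrix(np.ones((size, size)), size), hollow)  # 2d spec, diagonal zeroed
    assert get_hetero_matrix([0.1, 0.2], size) is None                                  # multi-item vector is invalid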
Example #2
    def test_recurrent_mech_matrix_keyword_spec(self):
        """Each (non-random) matrix keyword spec should yield the matrix returned by get_matrix"""

        for m in MATRIX_KEYWORD_VALUES:
            if m == RANDOM_CONNECTIVITY_MATRIX:
                continue
            R = RecurrentTransferMechanism(name='R', size=4, matrix=m)
            val = R.execute([10, 10, 10, 10])
            np.testing.assert_allclose(val, [[10., 10., 10., 10.]])
            np.testing.assert_allclose(R.recurrent_projection.matrix,
                                       get_matrix(m, R.size[0], R.size[0]))
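    # A hypothetical illustration (not part of the test) of what the keyword specifications
    # above are assumed to expand to via get_matrix for the 4x4 case, e.g.:
    #   IDENTITY_MATRIX          -> np.identity(4)
    #   FULL_CONNECTIVITY_MATRIX -> np.full((4, 4), 1.0)
    #   HOLLOW_MATRIX            -> np.full((4, 4), 1.0) - np.identity(4)
    # RANDOM_CONNECTIVITY_MATRIX is skipped in the loop because its expansion is stochastic,
    # so there is no fixed matrix to compare against.
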
    def _instantiate_parameter_states(self, function=None, context=None):

        super()._instantiate_parameter_states(function=function, context=context)

        # FIX: UPDATE FOR LEARNING
        # FIX: UPDATE WITH MODULATION_MODS
        # FIX: MOVE THIS TO MappingProjection.__init__;
        # FIX: AS IT IS, OVER-WRITES USER ASSIGNMENT OF FUNCTION IN params dict FOR MappingProjection
        matrix = get_matrix(self._parameter_states[MATRIX].value)
        initial_rate = matrix * 0.0  # currently unused: the rate argument below is commented out

        self._parameter_states[MATRIX].function_object = AccumulatorIntegrator(
            owner=self._parameter_states[MATRIX],
            initializer=matrix,
            # rate=initial_rate
        )
        self._parameter_states[MATRIX]._function = self._parameter_states[MATRIX].function_object.function
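
    # A minimal numpy sketch (an assumption, not PsyNeuLink internals) of the update rule the
    # MATRIX parameter state is given above: an AccumulatorIntegrator starts from `initializer`
    # (the projection's current matrix) and, on each call, scales its previous value by `rate`
    # and adds `increment`, e.g.:
    #
    #     def accumulate(previous, rate=1.0, increment=0.0):
    #         return previous * rate + increment
    #
    #     matrix = np.identity(3)       # hypothetical initializer
    #     matrix = accumulate(matrix)   # unchanged with the default rate and increment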
Example #4
    def _validate_params(self, request_set, target_set=None, context=None):
        """Validate **mask** argument"""

        super()._validate_params(request_set=request_set,
                                 target_set=target_set,
                                 context=context)

        if MASK in target_set and target_set[MASK]:
            mask = target_set[MASK]
            if isinstance(mask, (int, float)):
                return
            mask_shape = np.array(mask).shape
            matrix = get_matrix(self.user_params[FUNCTION_PARAMS][MATRIX],
                                len(self.sender.value),
                                len(self.receiver.value))
            matrix_shape = matrix.shape
            if mask_shape != matrix_shape:
                raise MaskedMappingProjectionError(
                    "Shape of the {} for {} ({}) "
                    "must be the same as its {} ({})".format(
                        repr(MASK), self.name, mask_shape, repr(MATRIX),
                        matrix_shape))
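
    # A hypothetical illustration of the shape check above: a scalar mask is always accepted,
    # while an array mask must match the projection's matrix shape exactly, e.g.:
    #   matrix shape (2, 3), mask 0.5           -> accepted (scalar)
    #   matrix shape (2, 3), mask shape (2, 3)  -> accepted
    #   matrix shape (2, 3), mask shape (3, 2)  -> MaskedMappingProjectionError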
    def _instantiate_receiver(self, context=None):
        """Determine matrix needed to map from sender to receiver

        Assign specification to self.matrix_spec attribute
        Assign matrix to self.matrix attribute

        """
        self.reshapedWeightMatrix = False

        # Get sender and receiver lengths
        # Note: if either is a scalar, manually set length to 1 to avoid TypeError in call to len()
        try:
            mapping_input_len = len(self.instance_defaults.variable)
        except TypeError:
            mapping_input_len = 1
        try:
            receiver_len = len(self.receiver.instance_defaults.variable)
        except TypeError:
            receiver_len = 1

        # Compare length of MappingProjection output and receiver's variable to be sure matrix has proper dimensions
        try:
            mapping_output_len = len(self.value)
        except TypeError:
            mapping_output_len = 1

        # FIX: CONVERT ALL REFS TO paramsCurrent[FUNCTION_PARAMS][MATRIX] TO self.matrix (CHECK THEY'RE THE SAME)
        # FIX: CONVERT ALL REFS TO matrix_spec TO self._matrix_spec
        # FIX: CREATE @PROPERTY FOR self._learning_spec AND ASSIGN IN INIT??
        # FIX: HOW DOES mapping_output_len RELATE TO receiver_len?

        # If the matrix spec was left to be auto-assigned, use an identity matrix when the
        # projection's input length matches the receiver's length, and a full-connectivity matrix otherwise
        if self._matrix_spec == AUTO_ASSIGN_MATRIX:
            if mapping_input_len == receiver_len:
                self._matrix_spec = IDENTITY_MATRIX
            else:
                self._matrix_spec = FULL_CONNECTIVITY_MATRIX

        # Length of the output of the Projection doesn't match the length of the receiving input state
        #    so consider reshaping the matrix
        if mapping_output_len != receiver_len:

            if 'projection' in self.name or 'Projection' in self.name:
                projection_string = ''
            else:
                projection_string = 'projection'

            if all(string in self.name for string in {'from', 'to'}):
                states_string = ''
            else:
                states_string = "from \'{}\' OuputState of \'{}\' to \'{}\'".format(self.sender.name,
                                                                                    self.sender.owner.name,
                                                                                    self.receiver.owner.name)
            if not isinstance(self._matrix_spec, str):
                # if all(string in self.name for string in {'from', 'to'}):

                raise ProjectionError("Width ({}) of the {} of \'{}{}\'{} "
                                      "does not match the length of its \'{}\' InputState ({})".
                                      format(mapping_output_len,
                                             VALUE,
                                             self.name,
                                             projection_string,
                                             states_string,
                                             self.receiver.name,
                                             receiver_len))

            elif self._matrix_spec in {IDENTITY_MATRIX, HOLLOW_MATRIX}:
                # Identity and hollow matrices are not reshapable
                raise ProjectionError("Output length ({}) of \'{}{}\' from {} to Mechanism \'{}\'"
                                      " must equal length of its InputState ({}) to use {}".
                                      format(mapping_output_len,
                                             self.name,
                                             projection_string,
                                             self.sender.name,
                                             self.receiver.owner.name,
                                             receiver_len,
                                             self._matrix_spec))
            else:
                # Flag that matrix is being reshaped
                self.reshapedWeightMatrix = True
                if self.prefs.verbosePref:
                    print("Length ({}) of the output of {}{} does not match the length ({}) "
                          "of the InputState for the receiver {}; the width of the matrix (number of columns); "
                          "the width of the matrix (number of columns) will be adjusted to accomodate the receiver".
                          format(mapping_output_len,
                                 self.name,
                                 projection_string,
                                 receiver_len,
                                 self.receiver.owner.name))

                self._matrix = get_matrix(self._matrix_spec, mapping_input_len, receiver_len, context=context)

                # Since matrix shape has changed, output of self.function may have changed, so update self.value
                self._update_value()

        super()._instantiate_receiver(context=context)
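
    # A minimal numpy sketch (assumptions, not PsyNeuLink internals) of the auto-assignment
    # rule applied above when no matrix was specified: an identity matrix when the projection's
    # input length matches the receiver's length, otherwise a full-connectivity (all-ones)
    # matrix; keyword-specified identity/hollow matrices cannot be reshaped on a length
    # mismatch, while other specifications are regenerated with the new dimensions.
    #
    #     def auto_assign(sender_len, receiver_len):
    #         if sender_len == receiver_len:
    #             return np.identity(sender_len)               # IDENTITY_MATRIX
    #         return np.full((sender_len, receiver_len), 1.0)  # FULL_CONNECTIVITY_MATRIX
    #
    #     auto_assign(4, 4)   # 4x4 identity
    #     auto_assign(3, 5)   # 3x5 all-ones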