def __init__(
            self,
            default_variable=None,
            size=None,
            function=Linear,
            # selection_function=OneHot(mode=MAX_INDICATOR),  # RE-INSTATE WHEN IMPLEMENT NHot function
            integrator_function=AdaptiveIntegrator,
            initial_value=None,
            noise: is_numeric_or_none = 0.0,
            integration_rate: is_numeric_or_none = 0.5,
            integrator_mode=False,
            clip=None,
            enable_learning=True,
            learning_rate: tc.optional(tc.any(parameter_spec, bool)) = None,
            learning_function: is_function_type = Kohonen(
                distance_function=GAUSSIAN),
            learned_projection: tc.optional(MappingProjection) = None,
            additional_output_ports: tc.optional(tc.any(str, Iterable)) = None,
            name=None,
            prefs: is_pref_set = None,
            **kwargs):
        # # Default output_ports is specified in constructor as a string rather than a list
        # # to avoid "gotcha" associated with mutable default arguments
        # # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
        # if output_ports is None:
        #     output_ports = [RESULT]

        output_ports = [
            RESULT, {
                NAME: INPUT_PATTERN,
                VARIABLE: OWNER_VARIABLE
            }
        ]
        if additional_output_ports:
            if isinstance(additional_output_ports, list):
                output_ports += additional_output_ports
            else:
                output_ports.append(additional_output_ports)

        self._learning_enabled = enable_learning
        self._learning_enable_deferred = False

        super().__init__(default_variable=default_variable,
                         size=size,
                         function=function,
                         integrator_function=integrator_function,
                         integrator_mode=integrator_mode,
                         learning_rate=learning_rate,
                         learning_function=learning_function,
                         learned_projection=learned_projection,
                         enable_learning=enable_learning,
                         initial_value=initial_value,
                         noise=noise,
                         integration_rate=integration_rate,
                         clip=clip,
                         output_ports=output_ports,
                         name=name,
                         prefs=prefs,
                         **kwargs)
    def __call__(self, terms: tc.any(PV, list)) -> tc.any(PV, tuple):
        """Return subvector(s) for specified term(s)"""
        if not isinstance(terms, list):
            return self.idx[terms.value]
        else:
            return tuple(
                [self.idx[pv_member.value] for pv_member in terms])
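The __call__ above dispatches on whether terms is a single member or a list. A minimal standalone sketch of the same lookup pattern (the PV enum and idx mapping here are hypothetical stand-ins, not the actual class attributes):

from enum import Enum

class PV(Enum):                      # hypothetical stand-in for the real PV enum
    A = 0
    B = 1

class SubvectorLookup:
    def __init__(self):
        self.idx = {0: [1.0, 2.0], 1: [3.0]}             # hypothetical term -> subvector map

    def __call__(self, terms):
        if not isinstance(terms, list):
            return self.idx[terms.value]                 # single term -> its subvector
        return tuple(self.idx[m.value] for m in terms)   # list of terms -> tuple of subvectors

lookup = SubvectorLookup()
print(lookup(PV.A))                  # [1.0, 2.0]
print(lookup([PV.A, PV.B]))          # ([1.0, 2.0], [3.0])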
Example #3
    def __init__(self,
                 sample: tc.optional(
                     tc.any(OutputPort, Mechanism_Base, dict, is_numeric,
                            str)) = None,
                 target: tc.optional(
                     tc.any(OutputPort, Mechanism_Base, dict, is_numeric,
                            str)) = None,
                 function=None,
                 output_ports: tc.optional(tc.optional(tc.any(
                     str, Iterable))) = None,
                 learning_rate: tc.optional(is_numeric) = None,
                 params=None,
                 name=None,
                 prefs: tc.optional(is_pref_set) = None,
                 **kwargs):

        input_ports = [sample, target]
        super().__init__(sample=sample,
                         target=target,
                         input_ports=input_ports,
                         function=function,
                         output_ports=output_ports,
                         learning_rate=learning_rate,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs)
Example #4
    def __init__(
            self,
            default_variable=None,
            matrix=HOLLOW_MATRIX,
            # metric:is_distance_metric=ENERGY,
            metric: tc.any(tc.enum(ENERGY, ENTROPY),
                           is_distance_metric) = ENERGY,
            transfer_fct: tc.optional(tc.any(function_type,
                                             method_type)) = None,
            normalize: bool = False,
            params=None,
            owner=None,
            prefs: is_pref_set = None):
        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(matrix=matrix,
                                                  metric=metric,
                                                  transfer_fct=transfer_fct,
                                                  normalize=normalize,
                                                  params=params)

        super().__init__(default_variable=default_variable,
                         params=params,
                         owner=owner,
                         prefs=prefs,
                         context=ContextFlags.CONSTRUCTOR)
Example #5
    def __init__(self,
                 sample: tc.optional(tc.any(OutputPort, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 target: tc.optional(tc.any(OutputPort, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 function=PredictionErrorDeltaFunction(),
                 output_ports: tc.optional(tc.any(str, Iterable)) = None,
                 learning_rate: is_numeric = 0.3,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs
                 ):

        input_ports = [sample, target]
        params = self._assign_args_to_param_dicts(sample=sample,
                                                  target=target,
                                                  function=function,
                                                  learning_rate=learning_rate,
                                                  params=params)

        super().__init__(sample=sample,
                         target=target,
                         input_ports=input_ports,
                         function=function,
                         output_ports=output_ports,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs
                         )
Example #6
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(tc.any(list, dict)) = None,
                 output_states: tc.optional(tc.any(str, Iterable)) = None,
                 function=Linear,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):

        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(function=function,
                                                  input_states=input_states,
                                                  output_states=output_states,
                                                  params=params)

        super(ProcessingMechanism,
              self).__init__(default_variable=default_variable,
                             size=size,
                             input_states=input_states,
                             function=function,
                             output_states=output_states,
                             params=params,
                             name=name,
                             prefs=prefs,
                             context=ContextFlags.CONSTRUCTOR)
    def __init__(self,
                 default_variable=None,
                 size=None,
                 function=Logistic,
                 matrix=None,
                 auto: is_numeric_or_none = None,
                 hetero: is_numeric_or_none = None,
                 integrator_function=AdaptiveIntegrator,
                 initial_value=None,
                 noise: is_numeric_or_none = 0.0,
                 integration_rate: is_numeric_or_none = 0.5,
                 integrator_mode=False,
                 k_value: is_numeric_or_none = 0.5,
                 threshold: is_numeric_or_none = 0,
                 ratio: is_numeric_or_none = 0.5,
                 average_based=False,
                 inhibition_only=True,
                 clip=None,
                 input_ports: tc.optional(tc.any(list, dict)) = None,
                 output_ports: tc.optional(tc.any(str, Iterable)) = RESULT,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs):
        # Default output_ports is specified in constructor as a string rather than a list
        # to avoid "gotcha" associated with mutable default arguments
        # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
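        # (a minimal illustration of this gotcha appears right after this constructor)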
        if output_ports is None:
            output_ports = [RESULT]

        # this defaults the matrix to be an identity matrix (self excitation)
        if matrix is None:
            if auto is None:
                auto = 5  # this value is bad: there should be a better way to estimate this?
            if hetero is None:
                hetero = 0

        super().__init__(default_variable=default_variable,
                         size=size,
                         input_ports=input_ports,
                         function=function,
                         matrix=matrix,
                         auto=auto,
                         hetero=hetero,
                         integrator_function=integrator_function,
                         integrator_mode=integrator_mode,
                         k_value=k_value,
                         threshold=threshold,
                         ratio=ratio,
                         inhibition_only=inhibition_only,
                         average_based=average_based,
                         initial_value=initial_value,
                         noise=noise,
                         integration_rate=integration_rate,
                         clip=clip,
                         output_ports=output_ports,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs)
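The mutable-default-argument "gotcha" referred to in the comment above, illustrated in isolation (plain Python, independent of any PsyNeuLink class): a default list is built once at definition time and shared across every call.

def broken(item, bucket=[]):      # the default list is evaluated once, at def time
    bucket.append(item)
    return bucket

print(broken('a'))                # ['a']
print(broken('b'))                # ['a', 'b']  <- state leaks between calls

def fixed(item, bucket=None):     # conventional fix: default to None, build a fresh list inside
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

print(fixed('a'))                 # ['a']
print(fixed('b'))                 # ['b']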
Example #8
    def __init__(self,
                 sample: tc.optional(tc.any(OutputState, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 target: tc.optional(tc.any(OutputState, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 function=PredictionErrorDeltaFunction(),
                 output_states: tc.optional(tc.any(str, Iterable)) = OUTCOME,
                 learning_rate: is_numeric = 0.3,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=componentType + INITIALIZING):
        input_states = [sample, target]
        params = self._assign_args_to_param_dicts(sample=sample,
                                                  target=target,
                                                  function=function,
                                                  input_states=input_states,
                                                  output_states=output_states,
                                                  learning_rate=learning_rate,
                                                  params=params)

        super().__init__(sample=sample,
                         target=target,
                         input_states=input_states,
                         function=function,
                         output_states=output_states,
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=context)
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(tc.any(list, dict)) = None,
                 function=None,
                 output_states: tc.optional(tc.any(str, Iterable)) = RESULTS,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):
        """Assign type-level preferences, default input value (SigmoidLayer_DEFAULT_BIAS) and call super.__init__
        """

        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(input_states=input_states,
                                                  output_states=output_states,
                                                  function=function,
                                                  params=params)

        super(IntegratorMechanism,
              self).__init__(default_variable=default_variable,
                             size=size,
                             function=function,
                             params=params,
                             name=name,
                             prefs=prefs,
                             context=ContextFlags.CONSTRUCTOR)
Example #10
    def configure_learning(self,
                           learning_function:tc.optional(tc.any(is_function_type))=None,
                           learning_rate:tc.optional(tc.any(numbers.Number, list, np.ndarray, np.matrix))=None,
                           learned_projection:tc.optional(MappingProjection)=None,
                           context=None):
        """Provide user-accessible-interface to _instantiate_learning_mechanism

        Configure KohonenMechanism for learning. Creates the following Components:

        * a `LearningMechanism` -- if the **learning_function** and/or **learning_rate** arguments are
          specified, they are used to construct the LearningMechanism, otherwise the values specified in the
          KohonenMechanism's constructor are used;
        ..
        * a `MappingProjection` from the KohonenMechanism's `primary OutputPort <OutputPort_Primary>`
          to the LearningMechanism's *ACTIVATION_INPUT* InputPort;
        ..
        * a `LearningProjection` from the LearningMechanism's *LEARNING_SIGNAL* OutputPort to the learned_projection;
          by default this is the KohonenMechanism's `learned_projection <KohonenMechanism.learned_projection>`;
          however a different one can be specified.

        """
        # This ensures that these are validated if the method is called from the command line (i.e., by the user)
        if learning_function:
            self.learning_function = learning_function
        if learning_rate:
            self.learning_rate = learning_rate
        if learned_projection:
            self.learned_projection = learned_projection

        # Assign learned_projection, using as default the first Projection to the Mechanism's primary InputPort
        try:
            self.learned_projection = self.learned_projection or self.input_port.path_afferents[0]
        except:
            self.learned_projection = None
        if not self.learned_projection:
            # Mechanism already belongs to a Process or System, so should have a MappingProjection by now
            if (self.processes or self.systems):
                raise KohonenError("Configuring learning for {} requires that it receive a {} "
                                   "from another {} within a {} to which it belongs".
                                   format(self.name, MappingProjection.__name__, Mechanism.__name__, Process.__name__))
                                   # "receive at least one {} or that the {} be specified".
                                   # format(self.name, MappingProjection.__name__, repr(LEARNED_PROJECTION)))
            # Mechanism doesn't yet belong to a Process or System, so wait until then to configure learning
            #  (this method will be called again from _add_projection_to_mechanism if a Projection is added)
            else:
                self._learning_enable_deferred = True
                return

        self.parameters.matrix._set(self.learned_projection.parameter_ports[MATRIX], context)

        self.learning_mechanism = self._instantiate_learning_mechanism(learning_function=self.learning_function,
                                                                       learning_rate=self.learning_rate,
                                                                       learned_projection=self.learned_projection,
                                                                       )

        self.learning_projection = self.learning_mechanism.output_ports[LEARNING_SIGNAL].efferents[0]

        if self.learning_mechanism is None:
            self.learning_enabled = False
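A minimal usage sketch for configure_learning. This is illustrative only: it assumes PsyNeuLink is installed and importable as pnl, that the constructor arguments used here (size, pathways) are accepted by the version in use, and that the learning_rate value is arbitrary.

import psyneulink as pnl

source = pnl.TransferMechanism(size=4, name='source')
kohonen = pnl.KohonenMechanism(size=4, enable_learning=False, name='kohonen')

# The pathway wires a MappingProjection from source to kohonen, so learned_projection
# can default to the first Projection to kohonen's primary InputPort.
comp = pnl.Composition(pathways=[source, kohonen])

# Turn learning on after construction; unspecified arguments fall back to the values
# given in the KohonenMechanism constructor (learning_function, learned_projection).
kohonen.configure_learning(learning_rate=0.05)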
def test_any2():
    nothing_at_all = ((type(None), ) * 1000)
    either_nothing = tc.any(tc.any(tc.any(tc.any(*nothing_at_all), *nothing_at_all), *nothing_at_all), *nothing_at_all)
    @tc.typecheck
    def biz(x) -> either_nothing:
        return x
    with expected(ReturnValueError("biz() has returned an incompatible value: anything")):
        biz("anything")
Example #12
    def __init__(self,
                 default_variable=None,
                 size=None,
                 function=None,
                 matrix=None,
                 auto: is_numeric_or_none=None,
                 hetero: is_numeric_or_none=None,
                 integrator_function=None,
                 initial_value=None,
                 noise: tc.optional(is_numeric_or_none) = None,
                 integration_rate: tc.optional(is_numeric_or_none) = None,
                 integrator_mode=None,
                 k_value: tc.optional(is_numeric_or_none) = None,
                 threshold: tc.optional(is_numeric_or_none) = None,
                 ratio: tc.optional(is_numeric_or_none) = None,
                 average_based=None,
                 inhibition_only=None,
                 clip=None,
                 input_ports:tc.optional(tc.optional(tc.any(list, dict))) = None,
                 output_ports:tc.optional(tc.any(str, Iterable))=None,
                 params=None,
                 name=None,
                 prefs: tc.optional(is_pref_set) = None,
                 **kwargs
                 ):
        # this defaults the matrix to be an identity matrix (self excitation)
        if matrix is None:
            if auto is None:
                auto = 5 # this value is bad: there should be a better way to estimate this?
            if hetero is None:
                hetero = 0

        super().__init__(
            default_variable=default_variable,
            size=size,
            input_ports=input_ports,
            function=function,
            matrix=matrix,
            auto=auto,
            hetero=hetero,
            integrator_function=integrator_function,
            integrator_mode=integrator_mode,
            k_value=k_value,
            threshold=threshold,
            ratio=ratio,
            inhibition_only=inhibition_only,
            average_based=average_based,
            initial_value=initial_value,
            noise=noise,
            integration_rate=integration_rate,
            clip=clip,
            output_ports=output_ports,
            params=params,
            name=name,
            prefs=prefs,
            **kwargs
        )
    def __init__(
            self,
            sender: tc.optional(tc.any(LearningSignal,
                                       LearningMechanism)) = None,
            receiver: tc.optional(tc.any(ParameterPort,
                                         MappingProjection)) = None,
            error_function: tc.optional(is_function_type) = LinearCombination(
                weights=[[-1], [1]]),
            learning_function: tc.optional(is_function_type) = BackPropagation,
            # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
            # learning_signal_params:tc.optional(dict)=None,
            learning_rate: tc.optional(tc.any(parameter_spec)) = None,
            learning_enabled: tc.optional(tc.any(bool, tc.enum(ONLINE,
                                                               AFTER))) = None,
            weight=None,
            exponent=None,
            params: tc.optional(dict) = None,
            name=None,
            prefs: is_pref_set = None,
            **kwargs):

        # IMPLEMENTATION NOTE:
        #     the error_function and learning_function arguments are implemented to preserve the ability to pass
        #     error function and learning function specifications from the specification of a LearningProjection (used
        #     to implement learning for a MappingProjection, e.g., in a tuple) to the LearningMechanism responsible
        #     for implementing the function; and for specifying the default LearningProjection for a Process.
        # If receiver has not been assigned, defer init to Port.instantiate_projection_to_state()
        if sender is None or receiver is None:
            # Flag for deferred initialization
            self.initialization_status = ContextFlags.DEFERRED_INIT

            # parameters should be passed through methods like
            # instantiate_sender instead of grabbed from attributes like this
            self._learning_function = learning_function
            self._learning_rate = learning_rate
            self._error_function = error_function

        # replaces similar code in _instantiate_sender
        try:
            if sender.owner.learning_rate is not None:
                learning_rate = sender.owner.learning_rate
        except AttributeError:
            pass

        super().__init__(sender=sender,
                         receiver=receiver,
                         weight=weight,
                         exponent=exponent,
                         params=params,
                         name=name,
                         prefs=prefs,
                         error_function=error_function,
                         learning_function=learning_function,
                         learning_rate=learning_rate,
                         learning_enabled=learning_enabled,
                         **kwargs)
Example #14
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(
                     tc.any(Iterable, Mechanism, OutputState,
                            InputState)) = None,
                 function=Linear,
                 initial_value=None,
                 noise=0.0,
                 time_constant=1.0,
                 integrator_mode=False,
                 clip=None,
                 output_states: tc.optional(tc.any(str, Iterable)) = RESULTS,
                 time_scale=TimeScale.TRIAL,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=componentType + INITIALIZING):
        """Assign type-level preferences and call super.__init__
        """

        # Default output_states is specified in constructor as a string rather than a list
        # to avoid "gotcha" associated with mutable default arguments
        # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
        if output_states is None or output_states is RESULTS:
            output_states = [RESULTS]

        params = self._assign_args_to_param_dicts(
            function=function,
            initial_value=initial_value,
            input_states=input_states,
            output_states=output_states,
            noise=noise,
            time_constant=time_constant,
            integrator_mode=integrator_mode,
            time_scale=time_scale,
            clip=clip,
            params=params)

        self.integrator_function = None

        if not isinstance(self.standard_output_states, StandardOutputStates):
            self.standard_output_states = StandardOutputStates(
                self, self.standard_output_states, indices=PRIMARY)

        super(TransferMechanism, self).__init__(
            variable=default_variable,
            size=size,
            params=params,
            name=name,
            prefs=prefs,
            context=self,
            input_states=input_states,
        )
    def __init__(
            self,
            sender: tc.optional(tc.any(LearningSignal,
                                       LearningMechanism)) = None,
            receiver: tc.optional(tc.any(ParameterPort,
                                         MappingProjection)) = None,
            error_function: tc.optional(is_function_type) = LinearCombination(
                weights=[[-1], [1]]),
            learning_function: tc.optional(is_function_type) = BackPropagation,
            # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
            # learning_signal_params:tc.optional(dict)=None,
            learning_rate: tc.optional(tc.any(parameter_spec)) = None,
            learning_enabled: tc.optional(tc.any(bool, tc.enum(ONLINE,
                                                               AFTER))) = None,
            weight=None,
            exponent=None,
            params: tc.optional(dict) = None,
            name=None,
            prefs: is_pref_set = None,
            **kwargs):

        # IMPLEMENTATION NOTE:
        #     the error_function and learning_function arguments are implemented to preserve the ability to pass
        #     error function and learning function specifications from the specification of a LearningProjection (used
        #     to implement learning for a MappingProjection, e.g., in a tuple) to the LearningMechanism responsible
        #     for implementing the function; and for specifying the default LearningProjection for a Process.
        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(
            error_function=error_function,
            learning_function=learning_function,
            learning_rate=learning_rate,
            # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
            # learning_signal_params=learning_signal_params,
            learning_enabled=learning_enabled,
            weight=weight,
            exponent=exponent,
            params=params)

        # If receiver has not been assigned, defer init to Port.instantiate_projection_to_state()
        if sender is None or receiver is None:
            # Flag for deferred initialization
            self.initialization_status = ContextFlags.DEFERRED_INIT

        super().__init__(sender=sender,
                         receiver=receiver,
                         weight=weight,
                         exponent=exponent,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs)
Example #16
def test_any2():
    nothing_at_all = ((type(None), ) * 1000)
    either_nothing = tc.any(
        tc.any(tc.any(tc.any(*nothing_at_all), *nothing_at_all),
               *nothing_at_all), *nothing_at_all)

    @tc.typecheck
    def biz(x) -> either_nothing:
        return x

    with expected(
            tc.ReturnValueError(
                "biz() has returned an incompatible value: anything")):
        biz("anything")
Example #17
def _lower_neighbors(
    dist_mat: array_like_2d, max_scale: tc.any(int, float)
) -> tc.list_of(tc.list_of(np.int32)):
    """
    Converts a distance matrix to neighbor information.

    Takes a square, possibly lower triangular, and returns a list of lists of neighbor indices,
    for neighbors up to the specified scale.

    Parameters
    ----------
    dist_mat: 2D array
        the distance matrix, which may be lower triangular
    max_scale: float
        the highest scale (distance) to consider

    Returns
    -------
    neighbors: list of lists of int
    """
    d = sp.lil_matrix(dist_mat)
    d[d == 0] = sys.float_info.epsilon
    d[np.diag_indices(d.shape[0])] = 0
    d[d > max_scale] = 0
    d = sp.tril(d)
    result = [[] for i in range(d.shape[0])]
    for k, v in np.transpose(d.nonzero()):
        result[k].append(v)
    return result
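A small worked example of _lower_neighbors (assuming the module-level imports it relies on are in place: numpy as np, scipy.sparse as sp, and sys). Distances above max_scale are dropped and, because only the lower triangle is kept, each point lists only lower-indexed neighbors.

import numpy as np

dist_mat = np.array([[0.0, 1.0, 2.0],
                     [1.0, 0.0, 1.2],
                     [2.0, 1.2, 0.0]])

# With max_scale=1.5 the pairs (0,1) and (1,2) survive; (0,2) is too far apart.
print(_lower_neighbors(dist_mat, max_scale=1.5))   # [[], [0], [1]]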
Example #18
    def __init__(self,
                 sender=None,
                 receiver=None,
                 matrix=DEFAULT_MATRIX,
                 mask: tc.optional(
                     tc.any(int, float, list, np.ndarray, np.matrix)) = None,
                 mask_operation: tc.enum(ADD, MULTIPLY,
                                         EXPONENTIATE) = MULTIPLY,
                 function=None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):

        params = self._assign_args_to_param_dicts(
            mask=mask,
            mask_operation=mask_operation,
            function_params={MATRIX: matrix},
            params=params)

        super().__init__(sender=sender,
                         receiver=receiver,
                         matrix=matrix,
                         function=function,
                         params=params,
                         name=name,
                         prefs=prefs)
Example #19
    def __init__(
            self,
            default_variable=None,
            size=None,
            # monitor_for_control:tc.optional(list)=None,
            mode: tc.optional(float) = 0.0,
            modulated_mechanisms: tc.optional(tc.any(list, str)) = None,
            modulation: tc.optional(
                _is_modulation_param) = ModulationParam.MULTIPLICATIVE,
            params=None,
            name=None,
            prefs: is_pref_set = None):

        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(
            mode=mode,
            modulated_mechanisms=modulated_mechanisms,
            params=params)

        super().__init__(
            default_variable=default_variable,
            size=size,
            # monitor_for_control=monitor_for_control,
            modulation=modulation,
            params=params,
            name=name,
            prefs=prefs,
            context=ContextFlags.CONSTRUCTOR)
Example #20
def _get_context(context: tc.any(ContextFlags, Context, str)):
    """Set flags based on a string of ContextFlags keywords
    If context is already a ContextFlags mask, return that
    Otherwise, return mask with flags set corresponding to keywords in context
    """
    # FIX: 3/23/18 UPDATE WITH NEW FLAGS
    if isinstance(context, ContextFlags):
        return context
    if isinstance(context, Context):
        context = context.string
    context_flag = ContextFlags.UNSET
    if VALIDATE in context:
        context_flag |= ContextFlags.VALIDATING
    if EXECUTING in context:
        context_flag |= ContextFlags.EXECUTING
    if CONTROL in context:
        context_flag |= ContextFlags.CONTROL
    if LEARNING in context:
        context_flag |= ContextFlags.LEARNING
    # if context == ContextFlags.TRIAL.name: # cxt-test
    #     context_flag |= ContextFlags.TRIAL
    # if context == ContextFlags.RUN.name:
    #     context_flag |= ContextFlags.RUN
    if context == ContextFlags.COMMAND_LINE.name:
        context_flag |= ContextFlags.COMMAND_LINE
    return context_flag
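A brief sketch of how _get_context composes flags from a keyword string, using the same keyword constants referenced in its body (so no assumption is made about their string values):

# A ContextFlags mask is returned unchanged:
assert _get_context(ContextFlags.EXECUTING) is ContextFlags.EXECUTING

# A string containing several keywords yields the OR of the corresponding flags:
flags = _get_context(VALIDATE + " " + LEARNING)
assert flags & ContextFlags.VALIDATING
assert flags & ContextFlags.LEARNING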
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_ports: tc.optional(
                     tc.any(Iterable, Mechanism, OutputPort,
                            InputPort)) = None,
                 function=None,
                 composition=None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):

        if default_variable is None and size is None:
            default_variable = self.class_defaults.variable
        self.composition = composition
        self.connected_to_composition = False

        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(function=function,
                                                  params=params)

        super(CompositionInterfaceMechanism, self).__init__(
            default_variable=default_variable,
            size=size,
            input_ports=input_ports,
            function=function,
            params=params,
            name=name,
            prefs=prefs,
        )
Example #22
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_ports: tc.optional(tc.optional(tc.any(Iterable, Mechanism, OutputPort, InputPort))) = None,
                 function=None,
                 composition=None,
                 port_map=None,
                 params=None,
                 name=None,
                 prefs:is_pref_set=None):

        if default_variable is None and size is None:
            default_variable = self.class_defaults.variable
        self.composition = composition
        self.port_map = port_map
        self.connected_to_composition = False
        self.user_added_ports = {
            INPUT_PORTS: set(),
            OUTPUT_PORTS: set()
        }
        super(CompositionInterfaceMechanism, self).__init__(default_variable=default_variable,
                                                            size=size,
                                                            input_ports=input_ports,
                                                            function=function,
                                                            params=params,
                                                            name=name,
                                                            prefs=prefs,
                                                            )
Example #23
    def __init__(self,
                 default_variable: tc.any(list, np.ndarray),
                 size=None,
                 function: tc.optional(is_function_type) = None,
                 learning_signals: tc.optional(tc.optional(list)) = None,
                 modulation: tc.optional(str) = None,
                 learning_rate: tc.optional(parameter_spec) = None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs):

        # # USE FOR IMPLEMENTATION OF deferred_init()
        # # Store args for deferred initialization
        # self._init_args = locals().copy()
        # self._init_args['context'] = self
        # self._init_args['name'] = name

        # # Flag for deferred initialization
        # self.initialization_status = ContextFlags.DEFERRED_INIT
        # self.initialization_status = ContextFlags.DEFERRED_INIT

        # self._learning_rate = learning_rate

        super().__init__(default_variable=default_variable,
                         size=size,
                         function=function,
                         modulation=modulation,
                         learning_rate=learning_rate,
                         params=params,
                         name=name,
                         prefs=prefs,
                         learning_signals=learning_signals,
                         **kwargs)
Example #24
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(
                     tc.any(Iterable, Mechanism, OutputState,
                            InputState)) = None,
                 function=Identity(),
                 composition=None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):

        if default_variable is None and size is None:
            default_variable = self.class_defaults.variable
        self.composition = composition
        self.connected_to_composition = False

        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(function=function,
                                                  input_states=input_states,
                                                  params=params)

        super(CompositionInterfaceMechanism,
              self).__init__(default_variable=default_variable,
                             size=size,
                             input_states=input_states,
                             function=function,
                             params=params,
                             name=name,
                             prefs=prefs,
                             context=ContextFlags.CONSTRUCTOR)
Example #25
    def __init__(
        self,
        default_variable=None,
        sample: tc.optional(
            tc.any(OutputState, Mechanism_Base, dict, is_numeric, str)) = None,
        target: tc.optional(
            tc.any(OutputState, Mechanism_Base, dict, is_numeric, str)) = None,
        function=LinearCombination(weights=[[-1], [1]]),
        output_states: tc.optional(tc.any(str, Iterable)) = (OUTCOME, ),
        params=None,
        name=None,
        prefs: is_pref_set = None,
        **input_states  # IMPLEMENTATION NOTE: this is for backward compatibility
    ):

        input_states = self._merge_legacy_constructor_args(
            sample, target, default_variable, input_states)

        # Default output_states is specified in constructor as a tuple rather than a list
        # to avoid "gotcha" associated with mutable default arguments
        # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
        if isinstance(output_states, (str, tuple)):
            output_states = list(output_states)

        # IMPLEMENTATION NOTE: The following prevents the default from being updated by subsequent assignment
        #                     (in this case, to [OUTCOME, {NAME= MSE}]), but fails to expose default in IDE
        # output_states = output_states or [OUTCOME, MSE]

        # Create a StandardOutputStates object from the list of standard_output_states specified for the class
        if not isinstance(self.standard_output_states, StandardOutputStates):
            self.standard_output_states = StandardOutputStates(
                self, self.standard_output_states, indices=PRIMARY)

        super().__init__(  # monitor=[sample, target],
            monitor=input_states,
            function=function,
            output_states=output_states.copy(),  # prevent default from getting overwritten by later assign
            params=params,
            name=name,
            prefs=prefs,
            context=ContextFlags.CONSTRUCTOR)

        # Require Projection to TARGET InputState (already required for SAMPLE as primary InputState)
        self.input_states[1].parameters.require_projection_in_composition.set(
            True, override=True)
    def __init__(self,
                 default_variable=None,
                 sample: tc.optional(
                     tc.any(OutputPort, Mechanism_Base, dict, is_numeric,
                            str)) = None,
                 target: tc.optional(
                     tc.any(OutputPort, Mechanism_Base, dict, is_numeric,
                            str)) = None,
                 function=None,
                 output_ports: tc.optional(tc.optional(tc.any(
                     str, Iterable))) = None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs):

        input_ports = kwargs.pop(INPUT_PORTS, {})
        if input_ports:
            input_ports = {INPUT_PORTS: input_ports}

        input_ports = self._merge_legacy_constructor_args(
            sample, target, default_variable, input_ports)

        # Default output_ports is specified in constructor as a tuple rather than a list
        # to avoid "gotcha" associated with mutable default arguments
        # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
        if isinstance(output_ports, (str, tuple)):
            output_ports = list(output_ports)

        # IMPLEMENTATION NOTE: The following prevents the default from being updated by subsequent assignment
        #                     (in this case, to [OUTCOME, {NAME= MSE}]), but fails to expose default in IDE
        # output_ports = output_ports or [OUTCOME, MSE]

        super().__init__(
            monitor=input_ports,
            function=function,
            output_ports=output_ports,  # prevent default from getting overwritten by later assign
            params=params,
            name=name,
            prefs=prefs,
            **kwargs)

        # Require Projection to TARGET InputPort (already required for SAMPLE as primary InputPort)
        self.input_ports[1].parameters.require_projection_in_composition.set(
            True, override=True)
    def __init__(
            self,
            sender: tc.optional(tc.any(LearningSignal,
                                       LearningMechanism)) = None,
            receiver: tc.optional(tc.any(ParameterState,
                                         MappingProjection)) = None,
            learning_function: tc.optional(is_function_type) = BackPropagation,
            # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
            # learning_signal_params:tc.optional(dict)=None,
            learning_rate: tc.optional(tc.any(parameter_spec)) = None,
            weight=None,
            exponent=None,
            params: tc.optional(dict) = None,
            name=None,
            prefs: is_pref_set = None,
            context=None):

        # IMPLEMENTATION NOTE:
        #     the learning_function argument is implemented to preserve the ability to pass a learning function
        #     specification from the specification of a LearningProjection (used to implement learning for a
        #     MappingProjection, e.g., in a tuple) to the LearningMechanism responsible for implementing the function

        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(
            learning_function=learning_function,
            learning_rate=learning_rate,
            # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
            # learning_signal_params=learning_signal_params,
            weight=weight,
            exponent=exponent,
            params=params)

        # If receiver has not been assigned, defer init to State.instantiate_projection_to_state()
        if sender is None or receiver is None:
            # Flag for deferred initialization
            self.init_status = InitStatus.DEFERRED_INITIALIZATION
        super().__init__(sender=sender,
                         receiver=receiver,
                         weight=weight,
                         exponent=exponent,
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=self)
        self.learning_enable = True
Example #28
    def _get_context_string(cls, condition_flags,
                            fields:tc.any(tc.enum(INITIALIZATION_STATUS,
                                                  EXECUTION_PHASE,
                                                  SOURCE), set, list)={INITIALIZATION_STATUS,
                                                                       EXECUTION_PHASE,
                                                                       SOURCE},
                            string:tc.optional(str)=None):
        """Return string with the names of flags that are set in **condition_flags**

        If **fields** is specified, then only the names of the flag(s) in the specified field(s) are returned.
        The fields argument must be the name of a field (*INITIALIZATION_STATUS*, *EXECUTION_PHASE*, or *SOURCE*)
        or a set or list of them.

        If **string** is specified, the string returned is prepended by **string**.
        """

        if string:
            string += ": "
        else:
            string = ""

        if isinstance(fields, str):
            fields = {fields}

        flagged_items = []
        # If UNSET or ALL_FLAGS, just return that
        if condition_flags == ContextFlags.ALL_FLAGS:
            return ContextFlags.ALL_FLAGS.name
        if condition_flags == ContextFlags.UNSET:
            return ContextFlags.UNSET.name
        # Otherwise, append each flag's name to the string
        # for c in (INITIALIZATION_STATUS_FLAGS | EXECUTION_PHASE_FLAGS | SOURCE_FLAGS):
        #     if c & condition_flags:
        #        flagged_items.append(c.name)
        if INITIALIZATION_STATUS in fields:
            for c in INITIALIZATION_STATUS_FLAGS:
                if not condition_flags & ContextFlags.INITIALIZATION_MASK:
                    flagged_items.append(ContextFlags.UNINITIALIZED.name)
                    break
                if c & condition_flags:
                    flagged_items.append(c.name)
        if EXECUTION_PHASE in fields:
            for c in EXECUTION_PHASE_FLAGS:
                if not condition_flags & ContextFlags.EXECUTION_PHASE_MASK:
                    flagged_items.append(ContextFlags.IDLE.name)
                    break
                if c & condition_flags:
                    flagged_items.append(c.name)
        if SOURCE in fields:
            for c in SOURCE_FLAGS:
                if not condition_flags & ContextFlags.SOURCE_MASK:
                    flagged_items.append(ContextFlags.NONE.name)
                    break
                if c & condition_flags:
                    flagged_items.append(c.name)
        string += ", ".join(flagged_items)
        return string
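A short sketch of _get_context_string (assumptions: the classmethod is reachable here on a class named Context, and COMMAND_LINE is a SOURCE flag, as its use elsewhere suggests). Restricting fields limits the report to one field, and string is prepended as a label.

s = Context._get_context_string(ContextFlags.COMMAND_LINE,
                                fields={SOURCE},
                                string="source")
print(s)   # "source: COMMAND_LINE"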
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_ports: tc.optional(tc.any(list, dict)) = None,
                 output_ports: tc.optional(tc.any(str, Iterable)) = None,
                 function=Linear,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs):
        super(ProcessingMechanism,
              self).__init__(default_variable=default_variable,
                             size=size,
                             input_ports=input_ports,
                             function=function,
                             output_ports=output_ports,
                             params=params,
                             name=name,
                             prefs=prefs,
                             **kwargs)
    def __init__(
            self,
            default_variable=None,
            size=None,
            matrix=HOLLOW_MATRIX,
            # metric:is_distance_metric=ENERGY,
            metric: tc.any(tc.enum(ENERGY, ENTROPY),
                           is_distance_metric) = ENERGY,
            transfer_fct: tc.optional(tc.any(function_type,
                                             method_type)) = None,
            normalize: bool = False,
            params=None,
            owner=None,
            prefs: is_pref_set = None):

        if size:
            if default_variable is None:
                default_variable = np.zeros(size)
            elif size != len(default_variable):
                raise FunctionError(
                    f"Both {repr(DEFAULT_VARIABLE)} ({default_variable}) and {repr(SIZE)} ({size}) "
                    f"are specified for {self.name} but are {SIZE}!=len({DEFAULT_VARIABLE})."
                )

        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(matrix=matrix,
                                                  metric=metric,
                                                  transfer_fct=transfer_fct,
                                                  normalize=normalize,
                                                  params=params)

        super().__init__(
            default_variable=default_variable,
            params=params,
            owner=owner,
            prefs=prefs,
        )

        # MODIFIED 6/12/19 NEW: [JDC]
        self._default_variable_flexibility = DefaultsFlexibility.FLEXIBLE
    def __init__(
            self,
            default_variable=None,
            size=None,
            matrix=None,
            # metric:is_distance_metric=None,
            metric: tc.optional(
                tc.any(tc.enum(ENERGY, ENTROPY), is_distance_metric)) = None,
            transfer_fct: tc.optional(
                tc.optional(tc.any(types.FunctionType,
                                   types.MethodType))) = None,
            normalize: tc.optional(bool) = None,
            params=None,
            owner=None,
            prefs: tc.optional(is_pref_set) = None):

        if size:
            if default_variable is None:
                default_variable = np.zeros(size)
            elif size != len(default_variable):
                raise FunctionError(
                    f"Both {repr(DEFAULT_VARIABLE)} ({default_variable}) and {repr(SIZE)} ({size}) "
                    f"are specified for {self.name} but are {SIZE}!=len({DEFAULT_VARIABLE})."
                )

        super().__init__(
            default_variable=default_variable,
            matrix=matrix,
            metric=metric,
            transfer_fct=transfer_fct,
            normalize=normalize,
            params=params,
            owner=owner,
            prefs=prefs,
        )

        # MODIFIED 6/12/19 NEW: [JDC]
        self._variable_shape_flexibility = DefaultsFlexibility.FLEXIBLE
@tc.typecheck
def foo_all(arg: tc.all(tc.any(bytes, bytearray), complete_blocks)): pass
foo_all(b"x" * 512)              # OK

@tc.typecheck
def accept_number(x: tc.any(int, tc.re("^[0-9]+$"))):
    return int(x) + 1

@tc.typecheck
def bar(x: tc.any((int, float), tc.re("^foo$"), tc.enum(b"X", b"Y"))):
    pass

@tc.typecheck
def foo(x: tc.any()):
    pass
bar((1, 1.0))
bar("foo")
bar(b"X")
bar(b"Y")

with expected(InputParameterError("bar() has got an incompatible value for x: (1.0, 1)")):
    bar((1.0, 1))
with expected(InputParameterError("bar() has got an incompatible value for x: b'foo'")):
    bar(b"foo")
with expected(InputParameterError("bar() has got an incompatible value for x: X")):
    bar("X")
with expected(InputParameterError("bar() has got an incompatible value for x: Y")):
    bar("Y")

nothing_at_all = ((type(None), ) * 1000)
either_nothing = tc.any(tc.any(tc.any(tc.any(*nothing_at_all), *nothing_at_all), *nothing_at_all), *nothing_at_all)

@typecheck
def biz(x) -> either_nothing:
    return x

with expected(ReturnValueError("biz() has returned an incompatible value: anything")):
    biz("anything")

@typecheck
def accept_number(x: tc.any(int, tc.has("^[0-9]+$"))):
    return int(x) + 1

assert accept_number(1) == 2
assert accept_number("1") == 2
assert accept_number(-1) == 0