Example No. 1
    def __init__(self,
                 default_variable=None,
                 mode: tc.optional(
                     tc.enum(MAX_VAL, MAX_ABS_VAL, MAX_INDICATOR,
                             MAX_ABS_INDICATOR, MIN_VAL, MIN_ABS_VAL,
                             MIN_INDICATOR, MIN_ABS_INDICATOR, PROB,
                             PROB_INDICATOR)) = None,
                 seed=None,
                 params=None,
                 owner=None,
                 prefs: tc.optional(is_pref_set) = None):

        reset_variable_shape_flexibility = False
        if mode in {PROB, PROB_INDICATOR} and default_variable is None:
            default_variable = [[0], [0]]
            reset_variable_shape_flexibility = True

        super().__init__(
            default_variable=default_variable,
            mode=mode,
            seed=seed,
            params=params,
            owner=owner,
            prefs=prefs,
        )

        if reset_variable_shape_flexibility:
            self._variable_shape_flexibility = DefaultsFlexibility.FLEXIBLE
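Every constructor in this collection leans on the same pattern: tc.enum(...) builds a membership predicate that the typecheck decorator evaluates against the argument at call time, and tc.optional(...) additionally admits None. A minimal, self-contained sketch of that pattern, assuming the typecheck-decorator package these snippets appear to use (MODE_A and MODE_B are placeholder constants, not names from the code above):

import typecheck as tc

MODE_A, MODE_B = "MODE_A", "MODE_B"

@tc.typecheck
def set_mode(mode: tc.optional(tc.enum(MODE_A, MODE_B)) = None):
    # tc.optional(...) admits None in addition to the enumerated values
    return mode

set_mode(MODE_B)     # accepted: the value is one of the enumerated constants
set_mode(None)       # accepted: optional also admits None
# set_mode("OTHER")  # would raise tc.InputParameterError (if I recall the
#                    # library's exception name correctly)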
Example No. 2
    def __init__(self,
                 default_variable=None,
                 mode: tc.enum(MAX_VAL, MAX_ABS_VAL, MAX_INDICATOR,
                               MAX_ABS_INDICATOR, MIN_VAL, MIN_ABS_VAL,
                               MIN_INDICATOR, MIN_ABS_INDICATOR, PROB,
                               PROB_INDICATOR) = MAX_VAL,
                 seed=None,
                 params=None,
                 owner=None,
                 prefs: is_pref_set = None):

        if seed is None:
            seed = get_global_seed()

        random_state = np.random.RandomState([seed])
        if not hasattr(self, "stateful_attributes"):
            self.stateful_attributes = ["random_state"]

        reset_default_variable_flexibility = False
        if mode in {PROB, PROB_INDICATOR} and default_variable is None:
            default_variable = [[0], [0]]
            reset_default_variable_flexibility = True

        super().__init__(
            default_variable=default_variable,
            mode=mode,
            random_state=random_state,
            params=params,
            owner=owner,
            prefs=prefs,
        )

        if reset_default_variable_flexibility:
            self._default_variable_flexibility = DefaultsFlexibility.FLEXIBLE
Example No. 3
    def __init__(
            self,
            default_variable=None,
            matrix=HOLLOW_MATRIX,
            # metric:is_distance_metric=ENERGY,
            metric: tc.any(tc.enum(ENERGY, ENTROPY),
                           is_distance_metric) = ENERGY,
            transfer_fct: tc.optional(tc.any(function_type,
                                             method_type)) = None,
            normalize: bool = False,
            params=None,
            owner=None,
            prefs: is_pref_set = None):
        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(matrix=matrix,
                                                  metric=metric,
                                                  transfer_fct=transfer_fct,
                                                  normalize=normalize,
                                                  params=params)

        super().__init__(default_variable=default_variable,
                         params=params,
                         owner=owner,
                         prefs=prefs,
                         context=ContextFlags.CONSTRUCTOR)
Example No. 4
    def __init__(self,
                 sender=None,
                 receiver=None,
                 matrix=DEFAULT_MATRIX,
                 mask: tc.optional(
                     tc.any(int, float, list, np.ndarray, np.matrix)) = None,
                 mask_operation: tc.enum(ADD, MULTIPLY,
                                         EXPONENTIATE) = MULTIPLY,
                 function=None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):

        params = self._assign_args_to_param_dicts(
            mask=mask,
            mask_operation=mask_operation,
            function_params={MATRIX: matrix},
            params=params)

        super().__init__(sender=sender,
                         receiver=receiver,
                         matrix=matrix,
                         function=function,
                         params=params,
                         name=name,
                         prefs=prefs)
Example No. 5
def _is_pathway_entry_spec(entry, desired_type: tc.enum(NODE, PROJECTION,
                                                        ANY)):
    """Test whether pathway entry is specified type (NODE or PROJECTION)"""
    from psyneulink.core.components.projections.projection import _is_projection_spec
    node_specs = (Mechanism, Composition)
    is_node = is_proj = False

    if desired_type in {NODE, ANY}:
        is_node = (isinstance(entry, node_specs)
                   or (isinstance(entry, tuple)
                       and isinstance(entry[0], node_specs) and
                       (isinstance(entry[1], NodeRole) or
                        (isinstance(entry[1], list)
                         and all(isinstance(nr, NodeRole)
                                 for nr in entry[1])))))

    if desired_type in {PROJECTION, ANY}:
        is_proj = (_is_projection_spec(entry) or
                   (isinstance(entry, tuple) and _is_projection_spec(entry[0])
                    and entry[1] in {True, FEEDBACK, False, MAYBE}))

    return is_node or is_proj
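Here tc.enum guards a plain string flag rather than a constructor argument. A self-contained analogue of the desired_type check, with NODE, PROJECTION, and ANY as stand-in module constants rather than the PsyNeuLink ones:

import typecheck as tc

NODE, PROJECTION, ANY = "NODE", "PROJECTION", "ANY"

@tc.typecheck
def check_entry(entry, desired_type: tc.enum(NODE, PROJECTION, ANY)):
    # the decorator rejects any desired_type outside the three constants
    # before the function body ever runs
    return desired_type

check_entry(object(), ANY)       # accepted
# check_entry(object(), "EDGE")  # would raise tc.InputParameterError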
Example No. 6
def test_enum5():
    pred = tc.enum(1, 2.0, "three", [1] * 4)
    assert pred(2 * 1.0)
    assert pred("thr" + 2 * "e")
    assert pred([1, 1, 1, 1])
    assert pred(1.0)
    assert not pred("thr")
    assert not pred([1, 1])
Example No. 7
def test_enum5():
    pred = tc.enum(1, 2.0, "three", [1] * 4)
    assert pred(2 * 1.0)
    assert pred("thr" + 2 * "e")
    assert pred([1, 1, 1, 1])
    assert pred(1.0)
    assert not pred("thr")
    assert not pred([1, 1])
Example No. 8
    def _get_context_string(cls, condition_flags,
                            fields: tc.any(tc.enum(INITIALIZATION_STATUS,
                                                   EXECUTION_PHASE,
                                                   SOURCE),
                                           set, list) = {INITIALIZATION_STATUS,
                                                         EXECUTION_PHASE,
                                                         SOURCE},
                            string: tc.optional(str) = None):
        """Return string with the names of flags that are set in **condition_flags**

        If **fields** is specified, then only the names of the flag(s) in the specified field(s) are returned.
        The fields argument must be the name of a field (*INITIALIZATION_STATUS*, *EXECUTION_PHASE*, or *SOURCE*)
        or a set or list of them.

        If **string** is specified, the string returned is prepended by **string**.
        """

        if string:
            string += ": "
        else:
            string = ""

        if isinstance(fields, str):
            fields = {fields}

        flagged_items = []
        # If ALL_FLAGS or UNSET, just return that flag's name
        if condition_flags == ContextFlags.ALL_FLAGS:
            return ContextFlags.ALL_FLAGS.name
        if condition_flags == ContextFlags.UNSET:
            return ContextFlags.UNSET.name
        # Otherwise, append each flag's name to the string
        # for c in (INITIALIZATION_STATUS_FLAGS | EXECUTION_PHASE_FLAGS | SOURCE_FLAGS):
        #     if c & condition_flags:
        #        flagged_items.append(c.name)
        if INITIALIZATION_STATUS in fields:
            for c in INITIALIZATION_STATUS_FLAGS:
                if not condition_flags & ContextFlags.INITIALIZATION_MASK:
                    flagged_items.append(ContextFlags.UNINITIALIZED.name)
                    break
                if c & condition_flags:
                    flagged_items.append(c.name)
        if EXECUTION_PHASE in fields:
            for c in EXECUTION_PHASE_FLAGS:
                if not condition_flags & ContextFlags.EXECUTION_PHASE_MASK:
                    flagged_items.append(ContextFlags.IDLE.name)
                    break
                if c & condition_flags:
                    flagged_items.append(c.name)
        if SOURCE in fields:
            for c in SOURCE_FLAGS:
                if not condition_flags & ContextFlags.SOURCE_MASK:
                    flagged_items.append(ContextFlags.NONE.name)
                    break
                if c & condition_flags:
                    flagged_items.append(c.name)
        string += ", ".join(flagged_items)
        return string
Example No. 9
def test_enum5():
    namespace = None
    pred = tc.enum(1, 2.0, "three", [1] * 4)
    assert pred(2 * 1.0, namespace)
    assert pred("thr" + 2 * "e", namespace)
    assert pred([1, 1, 1, 1], namespace)
    assert pred(1.0, namespace)
    assert not pred("thr", namespace)
    assert not pred([1, 1], namespace)
Example No. 10
def test_enum5():
    namespace = None
    pred = tc.enum(1, 2.0, "three", [1] * 4)
    assert pred(2 * 1.0, namespace)
    assert pred("thr" + 2 * "e", namespace)
    assert pred([1, 1, 1, 1], namespace)
    assert pred(1.0, namespace)
    assert not pred("thr", namespace)
    assert not pred([1, 1], namespace)
Example No. 11
    def __init__(
            self,
            sender: tc.optional(tc.any(LearningSignal,
                                       LearningMechanism)) = None,
            receiver: tc.optional(tc.any(ParameterPort,
                                         MappingProjection)) = None,
            error_function: tc.optional(is_function_type) = LinearCombination(
                weights=[[-1], [1]]),
            learning_function: tc.optional(is_function_type) = BackPropagation,
            # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
            # learning_signal_params:tc.optional(dict)=None,
            learning_rate: tc.optional(tc.any(parameter_spec)) = None,
            learning_enabled: tc.optional(tc.any(bool, tc.enum(ONLINE,
                                                               AFTER))) = None,
            weight=None,
            exponent=None,
            params: tc.optional(dict) = None,
            name=None,
            prefs: is_pref_set = None,
            **kwargs):

        # IMPLEMENTATION NOTE:
        #     the error_function and learning_function arguments are implemented to preserve the ability to pass
        #     error function and learning function specifications from the specification of a LearningProjection (used
        #     to implement learning for a MappingProjection, e.g., in a tuple) to the LearningMechanism responsible
        #     for implementing the function; and for specifying the default LearningProjection for a Process.
        # If receiver has not been assigned, defer init to Port.instantiate_projection_to_state()
        if sender is None or receiver is None:
            # Flag for deferred initialization
            self.initialization_status = ContextFlags.DEFERRED_INIT

            # parameters should be passed through methods like
            # instantiate_sender instead of grabbed from attributes like this
            self._learning_function = learning_function
            self._learning_rate = learning_rate
            self._error_function = error_function

        # replaces similar code in _instantiate_sender
        try:
            if sender.owner.learning_rate is not None:
                learning_rate = sender.owner.learning_rate
        except AttributeError:
            pass

        super().__init__(sender=sender,
                         receiver=receiver,
                         weight=weight,
                         exponent=exponent,
                         params=params,
                         name=name,
                         prefs=prefs,
                         error_function=error_function,
                         learning_function=learning_function,
                         learning_rate=learning_rate,
                         learning_enabled=learning_enabled,
                         **kwargs)
Example No. 12
    def __init__(
            self,
            sender: tc.optional(tc.any(LearningSignal,
                                       LearningMechanism)) = None,
            receiver: tc.optional(tc.any(ParameterPort,
                                         MappingProjection)) = None,
            error_function: tc.optional(is_function_type) = LinearCombination(
                weights=[[-1], [1]]),
            learning_function: tc.optional(is_function_type) = BackPropagation,
            # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
            # learning_signal_params:tc.optional(dict)=None,
            learning_rate: tc.optional(tc.any(parameter_spec)) = None,
            learning_enabled: tc.optional(tc.any(bool, tc.enum(ONLINE,
                                                               AFTER))) = None,
            weight=None,
            exponent=None,
            params: tc.optional(dict) = None,
            name=None,
            prefs: is_pref_set = None,
            **kwargs):

        # IMPLEMENTATION NOTE:
        #     the error_function and learning_function arguments are implemented to preserve the ability to pass
        #     error function and learning function specifications from the specification of a LearningProjection (used
        #     to implement learning for a MappingProjection, e.g., in a tuple) to the LearningMechanism responsible
        #     for implementing the function; and for specifying the default LearningProjection for a Process.
        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(
            error_function=error_function,
            learning_function=learning_function,
            learning_rate=learning_rate,
            # FIX: 10/3/17 - TEST IF THIS OK AND REINSTATE IF SO
            # learning_signal_params=learning_signal_params,
            learning_enabled=learning_enabled,
            weight=weight,
            exponent=exponent,
            params=params)

        # If receiver has not been assigned, defer init to Port.instantiate_projection_to_state()
        if sender is None or receiver is None:
            # Flag for deferred initialization
            self.initialization_status = ContextFlags.DEFERRED_INIT

        super().__init__(sender=sender,
                         receiver=receiver,
                         weight=weight,
                         exponent=exponent,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs)
Example No. 13
    def __init__(
            self,
            default_variable=None,
            size=None,
            matrix=HOLLOW_MATRIX,
            # metric:is_distance_metric=ENERGY,
            metric: tc.any(tc.enum(ENERGY, ENTROPY),
                           is_distance_metric) = ENERGY,
            transfer_fct: tc.optional(tc.any(function_type,
                                             method_type)) = None,
            normalize: bool = False,
            params=None,
            owner=None,
            prefs: is_pref_set = None):

        if size:
            if default_variable is None:
                default_variable = np.zeros(size)
            elif size != len(default_variable):
                raise FunctionError(
                    f"Both {repr(DEFAULT_VARIABLE)} ({default_variable}) and {repr(SIZE)} ({size}) "
                    f"are specified for {self.name}, but {SIZE} != len({DEFAULT_VARIABLE})."
                )

        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(matrix=matrix,
                                                  metric=metric,
                                                  transfer_fct=transfer_fct,
                                                  normalize=normalize,
                                                  params=params)

        super().__init__(
            default_variable=default_variable,
            params=params,
            owner=owner,
            prefs=prefs,
        )

        # MODIFIED 6/12/19 NEW: [JDC]
        self._default_variable_flexibility = DefaultsFlexibility.FLEXIBLE
Example No. 14
    def __init__(
            self,
            default_variable=None,
            size=None,
            matrix=None,
            # metric:is_distance_metric=None,
            metric: tc.optional(
                tc.any(tc.enum(ENERGY, ENTROPY), is_distance_metric)) = None,
            transfer_fct: tc.optional(tc.any(types.FunctionType,
                                             types.MethodType)) = None,
            normalize: tc.optional(bool) = None,
            params=None,
            owner=None,
            prefs: tc.optional(is_pref_set) = None):

        if size:
            if default_variable is None:
                default_variable = np.zeros(size)
            elif size != len(default_variable):
                raise FunctionError(
                    f"Both {repr(DEFAULT_VARIABLE)} ({default_variable}) and {repr(SIZE)} ({size}) "
                    f"are specified for {self.name}, but {SIZE} != len({DEFAULT_VARIABLE})."
                )

        super().__init__(
            default_variable=default_variable,
            matrix=matrix,
            metric=metric,
            transfer_fct=transfer_fct,
            normalize=normalize,
            params=params,
            owner=owner,
            prefs=prefs,
        )

        # MODIFIED 6/12/19 NEW: [JDC]
        self._variable_shape_flexibility = DefaultsFlexibility.FLEXIBLE
Example No. 15
    def __init__(self,
                 sender=None,
                 receiver=None,
                 matrix=DEFAULT_MATRIX,
                 mask: tc.optional(
                     tc.any(int, float, list, np.ndarray, np.matrix)) = None,
                 mask_operation: tc.enum(ADD, MULTIPLY,
                                         EXPONENTIATE) = MULTIPLY,
                 function=None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs):

        super().__init__(sender=sender,
                         receiver=receiver,
                         mask=mask,
                         mask_operation=mask_operation,
                         matrix=matrix,
                         function=function,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs)
Example No. 16
def foo(x: tc.optional(tc.enum(1, 2)) = 2):
    return x
Example No. 17
def foo(x: tc.enum(1) = 2):
    pass
Example No. 18
def bar(*, x: tc.enum(None)) -> tc.enum():
    return x
Example No. 19
def foo(x: tc.enum(int, 1)) -> tc.enum(1, int):
    return x
Example No. 20
def foo(x: tc.optional(tc.enum(1, 2)) = 2):
    return x
Example No. 21
def run(object,
        inputs,
        num_trials: tc.optional(int) = None,
        reset_clock: bool = True,
        initialize: bool = False,
        initial_values: tc.optional(tc.any(list, dict, np.ndarray)) = None,
        targets: tc.optional(tc.any(list, dict, np.ndarray,
                                    function_type)) = None,
        learning: tc.optional(bool) = None,
        call_before_trial: tc.optional(callable) = None,
        call_after_trial: tc.optional(callable) = None,
        call_before_time_step: tc.optional(callable) = None,
        call_after_time_step: tc.optional(callable) = None,
        clock=CentralClock,
        time_scale: tc.optional(tc.enum(TimeScale.TRIAL,
                                        TimeScale.TIME_STEP)) = None,
        termination_processing=None,
        termination_learning=None,
        context=None):
    """run(                      \
    inputs,                      \
    num_trials=None,             \
    reset_clock=True,            \
    initialize=False,            \
    initial_values=None,         \
    targets=None,                \
    learning=None,               \
    call_before_trial=None,      \
    call_after_trial=None,       \
    call_before_time_step=None,  \
    call_after_time_step=None,   \
    clock=CentralClock,          \
    time_scale=None)

    Run a sequence of executions for a `Process` or `System`.

    COMMENT:
        First, validate inputs (and targets, if learning is enabled).  Then, for each `TRIAL`:
            * call call_before_trial if specified;
            * for each time_step in the trial:
                * call call_before_time_step if specified;
                * call ``object.execute`` with inputs, and append result to ``object.results``;
                * call call_after_time_step if specified;
            * call call_after_trial if specified.
        Return ``object.results``.

        The inputs argument must be a list or an np.ndarray array of the appropriate dimensionality:
            * the inner-most dimension must equal the length of object.instance_defaults.variable (i.e., the input to the object);
            * for Mechanism format, the length of the value of all entries must be equal (== number of executions);
            * the outer-most dimension is the number of input sets (num_input_sets) specified (one per execution)
                Note: num_input_sets need not equal num_trials (the number of executions to actually run)
                      if num_trials > num_input_sets:
                          executions will cycle through input_sets, with the final one being only a partial cycle
                      if num_trials < num_input_sets:
                          the executions will only partially sample the input sets
    COMMENT

    Arguments
    ---------

    inputs : List[input] or ndarray(input) : default default_variable for a single `TRIAL`
        the input for each `TRIAL` in a sequence (see `Run_Inputs` for detailed description of formatting
        requirements and options).

    num_trials : int : default None
        the number of `TRIAL` \\s to run.  If it is `None` (the default), the number of `TRIAL` \\s run will be equal
        to the number of items specified in the **inputs** argument.  If **num_trials** exceeds the number of
        inputs, then the inputs will be cycled until the specified number of `TRIAL` \\s has been run.

    reset_clock : bool : default True
        if `True`, resets `CentralClock` to 0 before a sequence of `TRIAL` \\s.

    initialize : bool : default False
        calls the `initialize <System.initialize>` method of the System prior to the first `TRIAL`.

    initial_values : Dict[Mechanism:List[input]], List[input] or np.ndarray(input) : default None
        the initial values assigned to Mechanisms designated as `INITIALIZE_CYCLE`.

    targets : List[input] or np.ndarray(input) : default None
        the target values assigned to the `ComparatorMechanism` for each `TRIAL` (used for learning).
        Its length must equal the number of items in **inputs**.

    learning : bool :  default None
        enables or disables learning during execution for a `Process <Process_Execution_Learning>` or
        `System <System_Execution_Learning>`.  If it is not specified, the current state of learning is left intact.
        If it is `True`, learning is forced on; if it is `False`, learning is forced off.

    call_before_trial : Function : default= `None`
        called before each `TRIAL` in the sequence is run.

    call_after_trial : Function : default= `None`
        called after each `TRIAL` in the sequence is run.

    call_before_time_step : Function : default= `None`
        called before each `TIME_STEP` is executed.

    call_after_time_step : Function : default= `None`
        called after each `TIME_STEP` is executed.

    Returns
    -------

    <object>.results : List[OutputState.value]
        list of the values, for each `TRIAL`, of the OutputStates for a Mechanism run directly,
        or of the OutputStates of the `TERMINAL` Mechanisms for the Process or System run.
    """

    # small version of 'sequence' format in the one case where it was still working (single origin mechanism)
    if isinstance(inputs, (list, np.ndarray)):
        if len(object.origin_mechanisms) == 1:
            inputs = {object.origin_mechanisms[0]: inputs}
        else:
            raise RunError(
                "Inputs to {} must be specified in a dictionary with a key for each of its {} origin "
                "mechanisms.".format(object.name,
                                     len(object.origin_mechanisms)))
    elif not isinstance(inputs, dict):
        if len(object.origin_mechanisms) == 1:
            raise RunError(
                "Inputs to {} must be specified in a list or in a dictionary with the origin mechanism({}) "
                "as its only key".format(object.name,
                                         object.origin_mechanisms[0].name))
        else:
            raise RunError(
                "Inputs to {} must be specified in a dictionary with a key for each of its {} origin "
                "mechanisms.".format(object.name,
                                     len(object.origin_mechanisms)))

    inputs, num_inputs_sets = _adjust_stimulus_dict(object, inputs)

    num_trials = num_trials or num_inputs_sets  # num_trials may be provided by user, otherwise = # of input sets

    if targets:
        if isinstance(targets, dict):
            targets = _adjust_target_dict(object, targets)
        elif not isinstance(targets, function_type):
            raise RunError(
                "Targets for {} must be a dictionary or function.".format(
                    object.name))
        _validate_targets(object, targets, num_inputs_sets, context=context)

    object_type = _get_object_type(object)

    object.targets = targets

    time_scale = time_scale or TimeScale.TRIAL

    # SET LEARNING (if relevant)
    # FIX: THIS NEEDS TO BE DONE FOR EACH PROCESS IF THIS CALL TO run() IS FOR SYSTEM
    #      IMPLEMENT learning_enabled FOR SYSTEM, WHICH FORCES LEARNING OF PROCESSES WHEN SYSTEM EXECUTES?
    #      OR MAKE LEARNING A PARAM THAT IS PASSED IN execute
    # If learning is specified, buffer current state and set to specified state
    if learning is not None:
        try:
            learning_state_buffer = object._learning_enabled
        except AttributeError:
            if object.verbosePref:
                warnings.warn("WARNING: learning not enabled for {}".format(
                    object.name))
        else:
            if learning is True:
                object._learning_enabled = True

            elif learning is False:
                object._learning_enabled = False

    # SET LEARNING_RATE, if specified, for all learningProjections in process or system
    if object.learning_rate is not None:
        from psyneulink.components.projections.modulatory.learningprojection import LearningProjection
        for learning_mech in object.learning_mechanisms.mechanisms:
            for projection in learning_mech.output_state.efferents:
                if isinstance(projection, LearningProjection):
                    projection.function_object.learning_rate = object.learning_rate

    # Class-specific validation:
    context = context or RUN + "validating " + object.name

    # INITIALIZATION
    if reset_clock:
        clock.trial = 0
        clock.time_step = 0
    if initialize:
        object.initialize()

    # SET UP TIMING
    if object_type == MECHANISM:
        time_steps = 1
    else:
        time_steps = object.numPhases

    # EXECUTE
    execution_inputs = {}
    for execution in range(num_trials):

        execution_id = _get_unique_id()

        if call_before_trial:
            call_before_trial()

        for time_step in range(time_steps):

            if call_before_time_step:
                call_before_time_step()

            input_num = execution % num_inputs_sets

            for mech in inputs:
                execution_inputs[mech] = inputs[mech][input_num]
            if object_type == SYSTEM:
                object.inputs = execution_inputs

            # Assign targets:
            if targets is not None:

                if isinstance(targets, function_type):
                    object.target = targets

                # IMPLEMENTATION NOTE:  USE input_num since # of inputs must equal # of targets,
                #                       whereas targets can be assigned a function (so can't be used to generate the #)
                elif object_type == PROCESS:
                    object.target = targets[input_num][time_step]

                elif object_type == SYSTEM:
                    object.current_targets = targets[input_num]
            # MODIFIED 3/16/17 END
            if RUN in context and EVC_SIMULATION not in context:
                context = RUN + ": EXECUTING " + object_type.upper() + " " + object.name
                object.execution_status = ExecutionStatus.EXECUTING
            result = object.execute(
                input=execution_inputs,
                execution_id=execution_id,
                clock=clock,
                time_scale=time_scale,
                termination_processing=termination_processing,
                termination_learning=termination_learning,
                context=context)

            if call_after_time_step:
                call_after_time_step()

            clock.time_step += 1

        # object.results.append(result)
        if isinstance(result, Iterable):
            result_copy = result.copy()
        else:
            result_copy = result
        object.results.append(result_copy)

        if call_after_trial:
            call_after_trial()

        clock.trial += 1

    # Restore learning state
    try:
        learning_state_buffer
    except UnboundLocalError:
        pass
    else:
        object._learning_enabled = learning_state_buffer

    return object.results
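A hedged usage sketch, inferred from the docstring above rather than from running the code (my_process stands in for a configured Process with a single origin mechanism and is not constructed here):

# results = run(my_process,
#               inputs=[[0.5], [1.0]],  # two input sets for the single origin mechanism
#               num_trials=4)           # cycles through the two input sets twice
# Per the Returns section, 'results' accumulates one entry per TRIAL.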
Example No. 22
def foo(x: tc.enum(int, 1)) -> tc.enum(1, int):
    return x
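Worth noting for the example above: as the test_enum5 cases earlier suggest (pred(2 * 1.0) matches the 2.0 entry), tc.enum matches by equality, so int here is the class object itself taken as a value, not a type to check instances against. My reading of what the checks admit:

# foo(int)  # returns int: the class object equals itself, so both enums pass
# foo(1)    # returns 1: the value 1 is in both enums
# foo(2)    # would raise tc.InputParameterError: 2 is neither the class
#           # object int nor the value 1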
Example No. 23
def bar(*, x: tc.enum(None)) -> tc.enum():
    return x
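Two edge cases share this signature. By the same equality-based reading, tc.enum(None) admits only None, while the empty tc.enum() admits nothing at all, so bar can accept x=None but its return check can never succeed:

# bar(x=None)  # the parameter check passes, but the empty return enum rejects
#              # every value, so the call should raise the library's
#              # return-value error (tc.ReturnValueError, if I recall correctly)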
Example No. 24
def bar(x: tc.any((int, float), tc.re("^foo$"), tc.enum(b"X", b"Y"))):
    pass
Example No. 25
def bar(x: tc.any((int, float), tc.re("^foo$"), tc.enum(b"X", b"Y"))):
    pass
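The two identical examples above compose three checker forms inside tc.any: a tuple of types, tc.re for a regex over strings, and tc.enum over bytes values. My reading of what each alternative admits (hedged, since I have not verified the tuple shorthand against the library's docs):

# bar("foo")     # accepted: matches the ^foo$ regex
# bar(b"Y")      # accepted: in the bytes enum
# bar((3, 4.5))  # accepted if, as I recall, a tuple annotation checks a
#                # same-shape tuple element-wise
# bar("fool")    # would raise tc.InputParameterError: no alternative matches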
Example No. 26
    def __init__(
            self,
            default_variable=None,
            size=None,
            input_states: tc.optional(tc.any(list, dict)) = None,
            function: tc.optional(
                tc.enum(TIME_AVERAGE_INPUT, AVERAGE_INPUTS,
                        INPUT_SEQUENCE)) = TIME_AVERAGE_INPUT,
            initial_value=None,
            # rate:tc.optional(tc.any(int, float))=1.0,
            # noise:tc.optional(tc.any(int, float, callable))=0.0,
            rate: tc.any(int, float) = 1.0,
            noise: tc.any(int, float, callable) = 0.0,
            window_size=1,
            filter_function: tc.optional(callable) = None,
            params=None,
            name=None,
            prefs: is_pref_set = None,
            context=None):

        if context not in {ContextFlags.COMPONENT, ContextFlags.COMPOSITION,
                           ContextFlags.COMMAND_LINE}:
            warnings.warn(
                "PredictionMechanism should not be constructed on its own. If you insist, "
                "set context=ContextFlags.COMMAND_LINE, but proceed at your peril!"
            )
            return

        if params and FUNCTION in params:
            function = params[FUNCTION]

        input_type = None
        if function in input_types:
            input_type = function

        params = self._assign_args_to_param_dicts(
            window_size=window_size,
            input_type=input_type,
            filter_function=filter_function,
            params=params)

        if function in input_types:

            if function is TIME_AVERAGE_INPUT:
                # Use default for IntegratorMechanism: AdaptiveIntegrator
                function = self.ClassDefaults.function

            elif function in {AVERAGE_INPUTS, INPUT_SEQUENCE}:

                # Maintain the preceding sequence of inputs (of length window_size), and use those for each simulation
                function = Buffer(default_variable=[[0]],
                                  initializer=initial_value,
                                  rate=rate,
                                  noise=noise,
                                  history=self.window_size)

        params.update({FUNCTION_PARAMS: {RATE: rate, NOISE: noise}})

        super().__init__(default_variable=default_variable,
                         size=size,
                         input_states=input_states,
                         function=function,
                         params=params,
                         name=name,
                         prefs=prefs)
Example No. 27
def foo(x: tc.enum(1) = 2):
    pass
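The default here (2) violates the parameter's own annotation, tc.enum(1). I have not verified whether the library rejects this when the decorator processes the signature or only when a call actually falls back to the default, but either way the mismatch surfaces as an error:

# foo(1)  # accepted: the explicit argument is in the enum
# foo()   # fails: the default 2 is not among the enumerated values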