Example #1
0
 def render(self,
            layout: str = "",
            labels: tc.optional(bool) = None,
            mode: str = "",
            source: str = "",
            size: int = 0,
            font_size: tc.optional(int) = None):
     """Apply any explicitly supplied settings, then draw the graph.

     Arguments that are falsy (or None for `labels`/`font_size`) leave the
     corresponding attribute unchanged.  Supplying a new `source` resets the
     accumulated `weights` counter so the source is re-analyzed.
     """
     if layout:
         self.layout = layout
     if labels is not None:
         self.labels = labels
     if mode:
         self.mode = mode
     if source:
         self.source = source
         # a new source invalidates any previously collected weights
         self.weights = Counter()
     if size:
         self.size = size
     if font_size is not None:
         self.font_size = font_size
     # map the rendering mode to a node-size multiplier; the two
     # "*-structure" modes below also override any font_size set above
     if self.mode == "simple":
         self.node_multiplier = 0.7
     elif self.mode == "full":
         self.node_multiplier = 20
     elif self.mode == "reduced-structure":
         self.node_multiplier = 4
     elif self.mode == "simple-structure":
         self.node_multiplier = 4
         self.font_size = 10
     elif self.mode == "full-structure":
         self.node_multiplier = 6
         self.font_size = 8
     # no weights collected yet -- run the finder over the source script
     if not self.weights:
         self.finder.run_script(self.source)
     self.draw()
Example #2
0
    def __init__(self,
                 default_variable=None,
                 size=None,
                 function=Logistic,
                 matrix=None,
                 auto: is_numeric_or_none = None,
                 hetero: is_numeric_or_none = None,
                 integrator_function=AdaptiveIntegrator,
                 initial_value=None,
                 noise: is_numeric_or_none = 0.0,
                 integration_rate: is_numeric_or_none = 0.5,
                 integrator_mode=False,
                 k_value: is_numeric_or_none = 0.5,
                 threshold: is_numeric_or_none = 0,
                 ratio: is_numeric_or_none = 0.5,
                 average_based=False,
                 inhibition_only=True,
                 clip=None,
                 input_ports: tc.optional(tc.any(list, dict)) = None,
                 output_ports: tc.optional(tc.any(str, Iterable)) = RESULT,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs):
        """Default `output_ports` and the `auto`/`hetero` terms (when no
        explicit `matrix` is given), then forward all arguments unchanged to
        the superclass constructor.
        """
        # Default output_ports is specified in constructor as a string rather than a list
        # to avoid "gotcha" associated with mutable default arguments
        # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
        if output_ports is None:
            output_ports = [RESULT]

        # this defaults the matrix to be an identity matrix (self excitation)
        if matrix is None:
            if auto is None:
                auto = 5  # this value is bad: there should be a better way to estimate this?
            if hetero is None:
                hetero = 0

        super().__init__(default_variable=default_variable,
                         size=size,
                         input_ports=input_ports,
                         function=function,
                         matrix=matrix,
                         auto=auto,
                         hetero=hetero,
                         integrator_function=integrator_function,
                         integrator_mode=integrator_mode,
                         k_value=k_value,
                         threshold=threshold,
                         ratio=ratio,
                         inhibition_only=inhibition_only,
                         average_based=average_based,
                         initial_value=initial_value,
                         noise=noise,
                         integration_rate=integration_rate,
                         clip=clip,
                         output_ports=output_ports,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs)
    def __init__(
            self,
            default_variable=None,
            size=None,
            function=Linear,
            # selection_function=OneHot(mode=MAX_INDICATOR),  # RE-INSTATE WHEN IMPLEMENT NHot function
            integrator_function=AdaptiveIntegrator,
            initial_value=None,
            noise: is_numeric_or_none = 0.0,
            integration_rate: is_numeric_or_none = 0.5,
            integrator_mode=False,
            clip=None,
            enable_learning=True,
            learning_rate: tc.optional(tc.any(parameter_spec, bool)) = None,
            learning_function: is_function_type = Kohonen(
                distance_function=GAUSSIAN),
            learned_projection: tc.optional(MappingProjection) = None,
            additional_output_ports: tc.optional(tc.any(str, Iterable)) = None,
            name=None,
            prefs: is_pref_set = None,
            # BUG FIX: `params` was referenced in the super().__init__() call
            # below but was missing from the signature (NameError at runtime).
            # It is appended after the existing parameters so positional
            # callers are unaffected.
            params=None,
            **kwargs):
        """Build the output_ports list (RESULT plus the INPUT_PATTERN port),
        append any caller-supplied additional ports, record the learning
        flags, and delegate to the superclass constructor.
        """
        # output_ports is always built fresh here, so the mutable-default
        # "gotcha" that other constructors guard against does not apply
        output_ports = [
            RESULT, {
                NAME: INPUT_PATTERN,
                VARIABLE: OWNER_VARIABLE
            }
        ]
        if additional_output_ports:
            if isinstance(additional_output_ports, list):
                output_ports += additional_output_ports
            else:
                output_ports.append(additional_output_ports)

        self._learning_enabled = enable_learning
        # set later (e.g. by configure_learning) when learning cannot be
        # configured until the mechanism receives a projection
        self._learning_enable_deferred = False

        super().__init__(default_variable=default_variable,
                         size=size,
                         function=function,
                         integrator_function=integrator_function,
                         integrator_mode=integrator_mode,
                         learning_rate=learning_rate,
                         learning_function=learning_function,
                         learned_projection=learned_projection,
                         enable_learning=enable_learning,
                         initial_value=initial_value,
                         noise=noise,
                         integration_rate=integration_rate,
                         clip=clip,
                         output_ports=output_ports,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs)
Example #4
0
    def __init__(
            self,
            system: tc.optional(System_Base) = None,
            monitored_output_ports=None,
            function=None,
            # control_signals:tc.optional(list) = None,
            control_signals=None,
            modulation: tc.optional(str) = MULTIPLICATIVE,
            params=None,
            name=None,
            prefs: is_pref_set = None):
        """Construct the control mechanism with an ObjectiveMechanism that
        monitors `monitored_output_ports` through a DualAdaptiveIntegrator,
        then rename that ObjectiveMechanism after this mechanism and mark its
        role as CONTROL.
        """
        super().__init__(
            system=system,
            objective_mechanism=ObjectiveMechanism(
                monitored_output_ports=monitored_output_ports,
                function=DualAdaptiveIntegrator),
            control_signals=control_signals,
            modulation=modulation,
            params=params,
            name=name,
            prefs=prefs,
        )

        # rename the automatically created ObjectiveMechanism after this
        # mechanism and tag it as serving a control role
        self.objective_mechanism.name = self.name + '_ObjectiveMechanism'
        self.objective_mechanism._role = CONTROL
    def __init__(
            self,
            system: tc.optional(System_Base) = None,
            monitored_output_states=None,
            function=Linear(slope=1, intercept=0),
            # control_signals:tc.optional(list) = None,
            control_signals=None,
            modulation: tc.optional(
                _is_modulation_param) = ModulationParam.MULTIPLICATIVE,
            params=None,
            name=None,
            prefs: is_pref_set = None):
        """Collect `function`/`control_signals` into the params dict, build an
        ObjectiveMechanism (AGTUtilityIntegrator) over the monitored output
        states, delegate to the superclass constructor, then rename the
        ObjectiveMechanism and mark its role as CONTROL.

        NOTE(review): the default ``function=Linear(slope=1, intercept=0)`` is
        a single shared instance across calls -- presumably copied downstream;
        confirm before relying on it.
        """
        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(
            function=function, control_signals=control_signals, params=params)

        super().__init__(system=system,
                         objective_mechanism=ObjectiveMechanism(
                             monitored_output_states=monitored_output_states,
                             function=AGTUtilityIntegrator),
                         control_signals=control_signals,
                         modulation=modulation,
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=ContextFlags.CONSTRUCTOR)

        # rename the automatically created ObjectiveMechanism after this
        # mechanism and tag it as serving a control role
        self.objective_mechanism.name = self.name + '_ObjectiveMechanism'
        self.objective_mechanism._role = CONTROL
Example #6
0
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(tc.any(list, dict)) = None,
                 output_states: tc.optional(tc.any(str, Iterable)) = None,
                 function=Linear,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):
        """Collect the constructor arguments into the params dict and delegate
        to ProcessingMechanism's superclass constructor.
        """
        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(function=function,
                                                  input_states=input_states,
                                                  output_states=output_states,
                                                  params=params)

        super(ProcessingMechanism,
              self).__init__(default_variable=default_variable,
                             size=size,
                             input_states=input_states,
                             function=function,
                             output_states=output_states,
                             params=params,
                             name=name,
                             prefs=prefs,
                             context=ContextFlags.CONSTRUCTOR)
    def __init__(
            self,
            system: tc.optional(System_Base) = None,
            monitored_output_states=None,
            function=None,
            # control_signals:tc.optional(list) = None,
            control_signals=None,
            modulation: tc.optional(
                _is_modulation_param) = ModulationParam.MULTIPLICATIVE,
            params=None,
            name=None,
            prefs: is_pref_set = None):
        """Collect `function`/`control_signals` into the params dict, build an
        ObjectiveMechanism (DualAdaptiveIntegrator) over the monitored output
        states, delegate to the superclass constructor, then rename the
        ObjectiveMechanism and mark its role as CONTROL.
        """
        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(
            function=function, control_signals=control_signals, params=params)

        super().__init__(
            system=system,
            objective_mechanism=ObjectiveMechanism(
                monitored_output_states=monitored_output_states,
                function=DualAdaptiveIntegrator),
            control_signals=control_signals,
            modulation=modulation,
            params=params,
            name=name,
            prefs=prefs,
        )

        # rename the automatically created ObjectiveMechanism after this
        # mechanism and tag it as serving a control role
        self.objective_mechanism.name = self.name + '_ObjectiveMechanism'
        self.objective_mechanism._role = CONTROL
Example #8
0
def end_request(success: optional(bool), request: optional(Request) = None):
    """Record performance metrics and log the outcome of a finished request.

    `success` True means success, False means failure, None means the
    request is being abandoned.  If `request` is not given, the thread's
    current request (`pmnc.request`) is used.
    """
    # IDIOM FIX: replaced the fragile `cond and a or b` pattern with
    # conditional expressions (identical semantics for bool/None inputs)
    outcome = "success" if success else "failure"
    request = request or pmnc.request

    response_ms = int(request.elapsed * 1000)
    pmnc.performance.sample("interface.{0:s}.response_time.{1:s}".\
                            format(request.interface, outcome), response_ms)
    pmnc.performance.event("interface.{0:s}.response_rate.{1:s}".\
                           format(request.interface, outcome))

    # we don't care exactly which request is being destroyed
    request_count = _request_factory.destroyed()

    active_requests = (", {0:d} request(s) are still active".format(request_count)
                       if request_count > 0 else "")
    # include the description only when ending a request other than the
    # thread's current one (i.e. an explicitly passed request)
    request_description = ("{0:s} ".format(request.description)
                           if request is not current_thread()._request else "")
    request_outcome = ("ends with {0:s}".format(outcome)
                       if success is not None else "is being abandoned")

    if pmnc.log.noise:
        pmnc.log.noise("request {0:s}{1:s}{2:s}".\
                       format(request_description, request_outcome, active_requests))
Example #9
0
    def __init__(self,
                 default_gating_policy=None,
                 size=None,
                 function=Linear(slope=1, intercept=0),
                 gating_signals: tc.optional(list) = None,
                 modulation: tc.optional(
                     _is_modulation_param) = ModulationParam.MULTIPLICATIVE,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=None):
        """Collect gating-specific arguments into the params dict and delegate
        to the superclass constructor.

        NOTE(review): the default ``function=Linear(slope=1, intercept=0)`` is
        a single shared instance across calls -- presumably copied downstream;
        confirm before relying on it.
        """
        # self.system = None

        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(
            gating_signals=gating_signals, function=function, params=params)

        # default_gating_policy is forwarded as `variable`, and `self` is
        # passed as the context (the `context` parameter is not used here)
        super().__init__(variable=default_gating_policy,
                         size=size,
                         modulation=modulation,
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=self)
Example #10
0
    def __init__(self,
                 default_variable: tc.any(list, np.ndarray),
                 size=None,
                 function: tc.optional(is_function_type) = None,
                 # redundant double tc.optional(...) simplified
                 learning_signals: tc.optional(list) = None,
                 modulation: tc.optional(str) = None,
                 learning_rate: tc.optional(parameter_spec) = None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs):
        """Forward all constructor arguments unchanged to the superclass
        constructor (note that `default_variable` is required here).
        """
        # # USE FOR IMPLEMENTATION OF deferred_init()
        # # Store args for deferred initialization
        # self._init_args = locals().copy()
        # self._init_args['context'] = self
        # self._init_args['name'] = name

        # # Flag for deferred initialization
        # self.initialization_status = ContextFlags.DEFERRED_INIT
        # self.initialization_status = ContextFlags.DEFERRED_INIT

        # self._learning_rate = learning_rate

        super().__init__(default_variable=default_variable,
                         size=size,
                         function=function,
                         modulation=modulation,
                         learning_rate=learning_rate,
                         params=params,
                         name=name,
                         prefs=prefs,
                         learning_signals=learning_signals,
                         **kwargs)
 def render(self, layout: str="", labels: tc.optional(bool)=None,
            mode: str="", source: str="", size: int=0,
            font_size: tc.optional(int)=None):
     """Apply any explicitly supplied settings, then draw the graph.

     Falsy arguments (None for `labels`/`font_size`) leave the corresponding
     attribute unchanged; a new `source` resets the `weights` counter.
     """
     if layout:
         self.layout = layout
     if labels is not None:
         self.labels = labels
     if mode:
         self.mode = mode
     if source:
         self.source = source
         self.weights = Counter()
     if size:
         self.size = size
     if font_size is not None:
         self.font_size = font_size
     # per-mode node-size multiplier; the "structure" modes that carry a
     # second value also force a specific font size
     mode_settings = {
         "simple": (0.7, None),
         "full": (20, None),
         "reduced-structure": (4, None),
         "simple-structure": (4, 10),
         "full-structure": (6, 8),
     }
     if self.mode in mode_settings:
         multiplier, forced_font = mode_settings[self.mode]
         self.node_multiplier = multiplier
         if forced_font is not None:
             self.font_size = forced_font
     if not self.weights:
         self.finder.run_script(self.source)
     self.draw()
Example #12
0
    def __init__(self,
                 sample: tc.optional(tc.any(OutputPort, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 target: tc.optional(tc.any(OutputPort, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 function=PredictionErrorDeltaFunction(),
                 output_ports: tc.optional(tc.any(str, Iterable)) = None,
                 learning_rate: is_numeric = 0.3,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 **kwargs
                 ):
        """Pair `sample` and `target` as the two input ports, collect the
        remaining arguments into the params dict, and delegate to the
        superclass constructor.

        NOTE(review): the default ``function=PredictionErrorDeltaFunction()``
        is a single shared instance across calls -- presumably copied
        downstream; confirm before relying on it.
        """
        # the mechanism's two input ports, in fixed order: sample then target
        input_ports = [sample, target]
        params = self._assign_args_to_param_dicts(sample=sample,
                                                  target=target,
                                                  function=function,
                                                  learning_rate=learning_rate,
                                                  params=params)

        super().__init__(sample=sample,
                         target=target,
                         input_ports=input_ports,
                         function=function,
                         output_ports=output_ports,
                         params=params,
                         name=name,
                         prefs=prefs,
                         **kwargs
                         )
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(tc.any(list, dict)) = None,
                 function=None,
                 output_states: tc.optional(tc.any(str, Iterable)) = RESULTS,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None):
        """Collect port and function arguments into the params dict and
        delegate to IntegratorMechanism's superclass constructor.

        Note that input_states/output_states are passed only via the params
        dict, not as explicit keyword arguments to super().__init__.
        """
        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(input_states=input_states,
                                                  output_states=output_states,
                                                  function=function,
                                                  params=params)

        super(IntegratorMechanism,
              self).__init__(default_variable=default_variable,
                             size=size,
                             function=function,
                             params=params,
                             name=name,
                             prefs=prefs,
                             context=ContextFlags.CONSTRUCTOR)
Example #14
0
    def __init__(self,
                 default_variable=None,
                 mode: tc.optional(
                     tc.enum(MAX_VAL, MAX_ABS_VAL, MAX_INDICATOR,
                             MAX_ABS_INDICATOR, MIN_VAL, MIN_ABS_VAL,
                             MIN_INDICATOR, MIN_ABS_INDICATOR, PROB,
                             PROB_INDICATOR)) = None,
                 seed=None,
                 params=None,
                 owner=None,
                 prefs: tc.optional(is_pref_set) = None):
        """Initialize the function, substituting a two-element default
        variable for the probabilistic modes when none is supplied (and
        relaxing shape flexibility afterwards in that case).
        """
        # probabilistic modes need a two-element default variable; remember
        # to relax the shape-flexibility flag after the superclass runs
        needs_flexible_shape = False
        if default_variable is None and mode in {PROB, PROB_INDICATOR}:
            default_variable = [[0], [0]]
            needs_flexible_shape = True

        super().__init__(
            default_variable=default_variable,
            mode=mode,
            seed=seed,
            params=params,
            owner=owner,
            prefs=prefs,
        )

        if needs_flexible_shape:
            self._variable_shape_flexibility = DefaultsFlexibility.FLEXIBLE
Example #15
0
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_ports: tc.optional(tc.any(Iterable, Mechanism, OutputPort, InputPort)) = None,
                 function=None,
                 composition=None,
                 port_map=None,
                 params=None,
                 name=None,
                 prefs:is_pref_set=None):
        """Record the owning composition and port map, initialize the
        user-added-port bookkeeping, and delegate to the superclass
        constructor.  (Redundant double tc.optional(...) on `input_ports`
        simplified.)
        """
        if default_variable is None and size is None:
            default_variable = self.class_defaults.variable
        self.composition = composition
        self.port_map = port_map
        # set True once this mechanism is wired into its composition
        self.connected_to_composition = False
        # ports added explicitly by the user, tracked separately per direction
        self.user_added_ports = {
            INPUT_PORTS: set(),
            OUTPUT_PORTS: set()
        }
        super(CompositionInterfaceMechanism, self).__init__(default_variable=default_variable,
                                                            size=size,
                                                            input_ports=input_ports,
                                                            function=function,
                                                            params=params,
                                                            name=name,
                                                            prefs=prefs,
                                                            )
Example #16
0
    def __init__(
            self,
            default_variable=None,
            size=None,
            # monitor_for_control:tc.optional(list)=None,
            mode: tc.optional(float) = 0.0,
            modulated_mechanisms: tc.optional(tc.any(list, str)) = None,
            modulation: tc.optional(
                _is_modulation_param) = ModulationParam.MULTIPLICATIVE,
            params=None,
            name=None,
            prefs: is_pref_set = None):
        """Collect `mode` and `modulated_mechanisms` into the params dict and
        delegate to the superclass constructor.
        """
        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(
            mode=mode,
            modulated_mechanisms=modulated_mechanisms,
            params=params)

        super().__init__(
            default_variable=default_variable,
            size=size,
            # monitor_for_control=monitor_for_control,
            modulation=modulation,
            params=params,
            name=name,
            prefs=prefs,
            context=ContextFlags.CONSTRUCTOR)
Example #17
0
    def __init__(self,
                 sample: tc.optional(tc.any(OutputState, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 target: tc.optional(tc.any(OutputState, Mechanism_Base, dict,
                                            is_numeric,
                                            str)) = None,
                 function=PredictionErrorDeltaFunction(),
                 output_states: tc.optional(tc.any(str, Iterable)) = OUTCOME,
                 learning_rate: is_numeric = 0.3,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=componentType + INITIALIZING):
        """Pair `sample` and `target` as the two input states, collect the
        arguments into the params dict, and delegate to the superclass
        constructor.

        NOTE(review): the default ``function=PredictionErrorDeltaFunction()``
        is a single shared instance across calls -- presumably copied
        downstream; confirm before relying on it.
        """
        # the mechanism's two input states, in fixed order: sample then target
        input_states = [sample, target]
        params = self._assign_args_to_param_dicts(sample=sample,
                                                  target=target,
                                                  function=function,
                                                  input_states=input_states,
                                                  output_states=output_states,
                                                  learning_rate=learning_rate,
                                                  params=params)

        super().__init__(sample=sample,
                         target=target,
                         input_states=input_states,
                         function=function,
                         output_states=output_states,
                         params=params,
                         name=name,
                         prefs=prefs,
                         context=context)
Example #18
0
    def __init__(self,
                 default_variable=None,
                 rate=None,
                 noise=None,
                 initializer=None,
                 params: tc.optional(dict) = None,
                 owner=None,
                 prefs: tc.optional(is_pref_set) = None,
                 context=None,
                 **kwargs):
        """Default the stateful-function attribute lists (unless a subclass
        has already declared its own) and delegate to the superclass
        constructor.  (Redundant double tc.optional(...) on `params`
        simplified.)
        """
        # subclasses may declare their own initializers; only default them here
        if not hasattr(self, "initializers"):
            self.initializers = ["initializer"]

        if not hasattr(self, "stateful_attributes"):
            self.stateful_attributes = ["previous_value"]

        super().__init__(default_variable=default_variable,
                         rate=rate,
                         initializer=initializer,
                         noise=noise,
                         params=params,
                         owner=owner,
                         prefs=prefs,
                         context=context,
                         **kwargs)
Example #19
0
    def __init__(self, *,
                 timeout: optional(float) = None,
                 interface: optional(str) = None,
                 protocol: optional(str) = None,
                 parameters: optional(dict) = None,
                 description: optional(str) = None):
        """Create a request, computing its deadline and unique id.

        A request with a timeout (deadline) must name an interface and a
        protocol; an infinite (anonymous) request must leave both unset.
        """
        self._start = time()
        if timeout is None:
            self._deadline = None
            assert interface is None and protocol is None, \
                   "infinite request from specific interface/protocol"
        else:
            self._deadline = self._start + timeout
            assert interface is not None and protocol is not None, \
                   "request with deadline from unspecified interface/protocol"

        self._interface = interface
        self._protocol = protocol
        self._parameters = parameters or {}

        # unique id: RQ-<timestamp>-<12 random hex digits>
        self._unique_id = "RQ-{0:s}-{1:s}".format(
            strftime("%Y%m%d%H%M%S"),
            b2a_hex(urandom(6)).decode("ascii").upper())
        self._description = description
Example #20
0
    def configure_learning(self,
                           learning_function:tc.optional(tc.any(is_function_type))=None,
                           learning_rate:tc.optional(tc.any(numbers.Number, list, np.ndarray, np.matrix))=None,
                           learned_projection:tc.optional(MappingProjection)=None,
                           context=None):
        """Provide user-accessible-interface to _instantiate_learning_mechanism

        Configure KohonenMechanism for learning. Creates the following Components:

        * a `LearningMechanism` -- if the **learning_function** and/or **learning_rate** arguments are
          specified, they are used to construct the LearningMechanism, otherwise the values specified in the
          KohonenMechanism's constructor are used;
        ..
        * a `MappingProjection` from the KohonenMechanism's `primary OutputPort <OutputPort_Primary>`
          to the LearningMechanism's *ACTIVATION_INPUT* InputPort;
        ..
        * a `LearningProjection` from the LearningMechanism's *LEARNING_SIGNAL* OutputPort to the learned_projection;
          by default this is the KohonenMechanism's `learned_projection <KohonenMechanism.learned_projection>`;
          however a different one can be specified.

        """
        # This insures that these are validated if the method is called from the command line (i.e., by the user)
        if learning_function:
            self.learning_function = learning_function
        if learning_rate:
            self.learning_rate = learning_rate
        if learned_projection:
            self.learned_projection = learned_projection

        # Assign learned_projection, using as default the first Projection to the Mechanism's primary InputPort
        try:
            self.learned_projection = self.learned_projection or self.input_port.path_afferents[0]
        # BUG FIX: was a bare `except:` that swallowed every exception;
        # narrowed to the two that the lookup can legitimately raise
        # (no input_port / learned_projection attribute yet, or no afferents)
        except (AttributeError, IndexError):
            self.learned_projection = None
        if not self.learned_projection:
            # Mechanism already belongs to a Process or System, so should have a MappingProjection by now
            if (self.processes or self.systems):
                raise KohonenError("Configuring learning for {} requires that it receive a {} "
                                   "from another {} within a {} to which it belongs".
                                   format(self.name, MappingProjection.__name__, Mechanism.__name__, Process.__name__))
                                   # "receive at least one {} or that the {} be specified".
                                   # format(self.name, MappingProjection.__name__, repr(LEARNED_PROJECTION)))
            # Mechanism doesn't yet belong to a Process or System, so wait until then to configure learning
            #  (this method will be called again from _add_projection_to_mechanism if a Projection is added)
            else:
                self._learning_enable_deferred = True
                return

        self.parameters.matrix._set(self.learned_projection.parameter_ports[MATRIX], context)

        self.learning_mechanism = self._instantiate_learning_mechanism(learning_function=self.learning_function,
                                                                       learning_rate=self.learning_rate,
                                                                       learned_projection=self.learned_projection,
                                                                       )

        # BUG FIX: the None check previously came *after* dereferencing
        # self.learning_mechanism, so it could never be reached; check first
        # and disable learning if instantiation failed
        if self.learning_mechanism is None:
            self.learning_enabled = False
            return

        self.learning_projection = self.learning_mechanism.output_ports[LEARNING_SIGNAL].efferents[0]
Example #21
0
 def __init__(self, collection: str, selector: dict, update: dict, *,
              upsert: optional(bool) = False,
              multi_update: optional(bool) = False):
     """Build an update request against *collection* for documents matching
     *selector*, applying *update* (optionally as upsert / multi-update).
     """
     MongoDB_Request.__init__(self)
     self._collection, self._selector, self._update = \
         collection, selector, update
     self._upsert, self._multi_update = upsert, multi_update
Example #22
0
def enqueue(request: Request,
            f: callable,
            args: optional(tuple) = (),
            kwargs: optional(dict) = None):
    """Submit f(*args, **kwargs) for execution on behalf of `request` via the
    main thread pool; returns whatever the pool's enqueue returns.

    BUG FIX: `kwargs` previously defaulted to a shared mutable dict (`{}`);
    it now defaults to None and a fresh dict is substituted per call.
    Callers that omitted `kwargs` see identical behavior.
    """
    if kwargs is None:
        kwargs = {}

    # record one tick on the request-rate sampler
    _request_rate_sampler.tick()

    main_thread_pool = _get_main_thread_pool()
    return main_thread_pool.enqueue(request, f, args, kwargs)
Example #23
0
    def __init__(self, name: str, *,
                 server_address: (str, int),
                 connect_timeout: float,
                 response_timeout: float,
                 ping_interval: optional(float),
                 system_id: str,
                 password: str,
                 system_type: str,
                 esme_ton: byte,
                 esme_npi: byte,
                 esme_addr: str,
                 esme_type: one_of("rcvr", "xmit", "xcvr"),
                 request_timeout: optional(float) = None,
                 **kwargs): # this kwargs allows for extra application-specific
                            # settings in config_interface_smpp_X.py
        # Set up the SMPP interface: ping timers, the in/out queues shared
        # with the connection, and a factory that creates the bound
        # connection on demand.

        self._name = name
        self._response_timeout = response_timeout

        # with no ping_interval, keepalive pings are disabled entirely
        if ping_interval:
            self._ping_timeout = Timeout(ping_interval)
            self._ping_response_timeout = Timeout(response_timeout)
        else:
            self._ping_timeout = self._ping_response_timeout = None
        self._ping_request = None

        self._in_q = InterlockedQueue()
        self._out_q = InterlockedQueue()
        self._inflight = InflightRequests()
        self._ceased = Event()

        # choose the bind PDU by ESME type; esme_type is constrained to one
        # of these three values by its one_of(...) annotation above
        if esme_type == "rcvr":
            bind_pdu = BindReceiverPDU
        elif esme_type == "xmit":
            bind_pdu = BindTransmitterPDU
        elif esme_type == "xcvr":
            bind_pdu = BindTransceiverPDU

        # deferred construction: a fresh connection is created each time the
        # factory is invoked, sharing this interface's queues and inflight set
        self._create_connection = \
            lambda: _SMPPConnection(name, self._in_q, self._out_q, self._inflight,
                                    server_address = server_address,
                                    connect_timeout = connect_timeout,
                                    response_timeout = response_timeout,
                                    system_id = system_id,
                                    password = password,
                                    system_type = system_type,
                                    esme_ton = esme_ton,
                                    esme_npi = esme_npi,
                                    esme_addr = esme_addr,
                                    bind_pdu = bind_pdu)

        self._request_timeout = request_timeout or \
            pmnc.config_interfaces.get("request_timeout") # this is now static

        if pmnc.request.self_test == __name__: # self-test
            self._process_request = kwargs["process_request"]
Example #24
0
 def import_hook(self, name: str, caller: tc.optional(Module)=None,
                 fromlist: tc.optional(list)=None,
                 level: int=-1) -> tc.optional(Module):
     """Record import statistics for matching module names, then delegate to
     the superclass import hook.

     NOTE(review): despite the ``tc.optional(Module)`` return annotation, no
     value is returned (the superclass result is discarded) -- confirm
     whether a ``return`` was intended.
     """
     if self.matches(name):
         if caller:
             if self.debug:
                 print(caller.__name__, " -> ", name)
             # count how often `name` is imported and record the import edge
             self.cf_weights[name] += 1
             self.cf_imports[(caller.__name__, name)] = 1
     super().import_hook(name, caller, fromlist, level)
Example #25
0
def fake_request(timeout: optional(float) = None,
                 interface: optional(str) = "__fake__") -> Request:
    """Create a request object (infinite when no timeout is given), install
    it on the current thread and return it."""
    if timeout is None:
        request = InfiniteRequest()
    else:
        request = Request(timeout = timeout, interface = interface, protocol = "n/a",
                          parameters = dict(auth_tokens = {}))
    current_thread()._request = request
    return request
Example #26
0
    def __init__(self,
                 default_variable=None,
                 size=None,
                 input_states: tc.optional(
                     tc.any(Iterable, Mechanism, OutputState,
                            InputState)) = None,
                 function=Linear,
                 initial_value=None,
                 noise=0.0,
                 time_constant=1.0,
                 integrator_mode=False,
                 clip=None,
                 output_states: tc.optional(tc.any(str, Iterable)) = RESULTS,
                 time_scale=TimeScale.TRIAL,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=componentType + INITIALIZING):
        """Assign type-level preferences, pack constructor arguments into the
        params dict and call super.__init__.

        Most arguments are passed through to the superclass unchanged; the
        function/noise/integration arguments are first folded into `params`
        via _assign_args_to_param_dicts.
        """

        # Default output_states is specified in constructor as a string rather than a list
        # to avoid "gotcha" associated with mutable default arguments
        # (see: bit.ly/2uID3s3 and http://docs.python-guide.org/en/latest/writing/gotchas/)
        if output_states is None or output_states is RESULTS:
            output_states = [RESULTS]

        # fold the mechanism-specific arguments into the params dict expected
        # by the superclass constructor
        params = self._assign_args_to_param_dicts(
            function=function,
            initial_value=initial_value,
            input_states=input_states,
            output_states=output_states,
            noise=noise,
            time_constant=time_constant,
            integrator_mode=integrator_mode,
            time_scale=time_scale,
            clip=clip,
            params=params)

        # created lazily, on first use
        self.integrator_function = None

        # wrap the class-level standard output states exactly once
        if not isinstance(self.standard_output_states, StandardOutputStates):
            self.standard_output_states = StandardOutputStates(
                self, self.standard_output_states, indices=PRIMARY)

        super(TransferMechanism, self).__init__(
            variable=default_variable,
            size=size,
            params=params,
            name=name,
            prefs=prefs,
            context=self,
            input_states=input_states,
        )
Example #27
0
    def __init__(self,
                 default_variable=None,
                 size=None,
                 function=None,
                 matrix=None,
                 auto: is_numeric_or_none = None,
                 hetero: is_numeric_or_none = None,
                 integrator_function=None,
                 initial_value=None,
                 noise: tc.optional(is_numeric_or_none) = None,
                 integration_rate: tc.optional(is_numeric_or_none) = None,
                 integrator_mode=None,
                 k_value: tc.optional(is_numeric_or_none) = None,
                 threshold: tc.optional(is_numeric_or_none) = None,
                 ratio: tc.optional(is_numeric_or_none) = None,
                 average_based=None,
                 inhibition_only=None,
                 clip=None,
                 input_ports: tc.optional(tc.any(list, dict)) = None,
                 output_ports: tc.optional(tc.any(str, Iterable)) = None,
                 params=None,
                 name=None,
                 prefs: tc.optional(is_pref_set) = None,
                 **kwargs
                 ):
        """Default the matrix-related arguments and delegate to super().__init__.

        When no matrix is supplied, auto (self-excitation) defaults to 5 and
        hetero (cross-connection weight) to 0; all other arguments are passed
        through unchanged.
        """
        # this defaults the matrix to be an identity matrix (self excitation)
        if matrix is None:
            if auto is None:
                auto = 5 # this value is bad: there should be a better way to estimate this?
            if hetero is None:
                hetero = 0

        super().__init__(
            default_variable=default_variable,
            size=size,
            input_ports=input_ports,
            function=function,
            matrix=matrix,
            auto=auto,
            hetero=hetero,
            integrator_function=integrator_function,
            integrator_mode=integrator_mode,
            k_value=k_value,
            threshold=threshold,
            ratio=ratio,
            inhibition_only=inhibition_only,
            average_based=average_based,
            initial_value=initial_value,
            noise=noise,
            integration_rate=integration_rate,
            clip=clip,
            output_ports=output_ports,
            params=params,
            name=name,
            prefs=prefs,
            **kwargs
        )
Example #28
0
def get_private_thread_pool(pool_name: optional(str) = None,
                            pool_size: optional(int) = None,
                            *, __source_module_name) -> ThreadPool:
    """Return (creating on first use) a thread pool private to the calling module.

    pool_name: optional sub-name appended to the module name; None selects
               the module's default private pool.
    pool_size: thread count for a newly created pool; defaults to the
               configured "thread_count".
    """
    # qualify the pool name with the calling module so pools never clash
    suffix = "/{0:s}".format(pool_name) if pool_name is not None else ""
    pool_name = "{0:s}{1:s}".format(__source_module_name, suffix)
    with _pools_lock:

        if pool_name not in _private_pools:
            pool_size = pool_size or pmnc.config_interfaces.get("thread_count")
            _private_pools[pool_name] = ThreadPool(pool_name, pool_size)

        return _private_pools[pool_name]
Example #29
0
def get_private_thread_pool(pool_name: optional(str) = None,
                            pool_size: optional(int) = None,
                            *, __source_module_name) -> ThreadPool:
    """Return the calling module's private thread pool, creating it on
    first access under the pools lock."""
    full_name = "{0:s}{1:s}".format(__source_module_name,
                                    pool_name is not None and "/{0:s}".format(pool_name) or "")
    with _pools_lock:
        pool = _private_pools.get(full_name)
        if pool is None:
            size = pool_size or pmnc.config_interfaces.get("thread_count")
            pool = ThreadPool(full_name, size)
            _private_pools[full_name] = pool
        return pool
Example #30
0
def fake_request(timeout: optional(float) = None,
                 interface: optional(str) = "__fake__") -> Request:
    """Create a request object and install it as the current thread's request.

    timeout: request deadline in seconds; None yields an InfiniteRequest
             with no deadline.
    interface: interface name recorded on the request.
    """

    if timeout is not None:
        request = Request(timeout=timeout,
                          interface=interface,
                          protocol="n/a",
                          parameters=dict(auth_tokens={}))
    else:
        request = InfiniteRequest()

    current_thread()._request = request
    return request
Example #31
0
 def begin_work(self, timeout: optional(float) = None) -> (bool, optional(int)):
     """Wait for one of the registered sources to offer work.

     Returns (stopped, index): stopped is True when the current thread was
     asked to stop; index is the offset of the source that accepted work,
     or None when the timeout elapsed with no work available.
     """
     timeout = Timeout(timeout or self._idle_timeout + 1.0)
     while not timeout.expired:
         if current_thread().stopped():
             return True, None
         self._signal.wait(min(timeout.remain, 3.0)) # this may spend waiting slightly less, but it's ok
         with self._lock:
             for i, source in enumerate(self._sources):
                 if source.begin_work():
                     return False, i
             # no source had work: reset the signal before waiting again
             self._signal.clear()
     return False, None
Example #32
0
    def __init__(self,
                 owner=None,
                 reference_value=None,
                 variable=None,
                 size=None,
                 index=PRIMARY,
                 assign=None,
                 function=Linear(),
                 learning_rate: tc.optional(parameter_spec) = None,
                 modulation: tc.optional(_is_modulation_param) = None,
                 projections=None,
                 params=None,
                 name=None,
                 prefs: is_pref_set = None,
                 context=None):
        """Record the construction context, pack the function/learning_rate
        arguments into params, and delegate to super().__init__."""

        # NOTE(review): the caller-supplied context value is discarded on
        # both branches — it is only used as a None/not-None flag to choose
        # between COMMAND_LINE and CONSTRUCTOR; confirm this is intended.
        if context is None:
            context = ContextFlags.COMMAND_LINE
            self.context.source = ContextFlags.COMMAND_LINE
        else:
            context = ContextFlags.CONSTRUCTOR
            self.context.source = ContextFlags.CONSTRUCTOR

        # Assign args to params and functionParams dicts
        params = self._assign_args_to_param_dicts(function=function,
                                                  learning_rate=learning_rate,
                                                  params=params)

        # FIX: 5/26/16
        # IMPLEMENTATION NOTE:
        # Consider adding self to owner.output_states here (and removing from LearningProjection._instantiate_sender)
        #  (test for it, and create if necessary, as per OutputStates in LearningProjection._instantiate_sender),

        # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults
        super().__init__(
            owner=owner,
            reference_value=reference_value,
            variable=variable,
            size=size,
            modulation=modulation,
            index=index,
            assign=None,  # NOTE(review): hard-coded None — the `assign` parameter is ignored here; confirm intended
            projections=projections,
            params=params,
            name=name,
            prefs=prefs,
            context=context,
            function=function,
        )
 def __init__(self,
              default_variable=None,
              metric: tc.optional(DistanceMetrics._is_metric) = None,
              normalize: tc.optional(bool) = None,
              params=None,
              owner=None,
              prefs: tc.optional(is_pref_set) = None):
     """Forward all constructor arguments to the superclass unchanged."""
     super().__init__(default_variable=default_variable,
                      metric=metric,
                      normalize=normalize,
                      params=params,
                      owner=owner,
                      prefs=prefs)
Example #34
0
    def __init__(self,
                 *,
                 server_address: (str, int),
                 auth_database: str,
                 username: optional(str) = None,
                 password: optional(str) = None):
        """Store the connection parameters and initialize the read buffer.

        server_address: (host, port) pair of the server.
        auth_database:  database used for authentication.
        username/password: credentials; None is treated as empty.
        """
        self._server_address = server_address
        self._auth_database = auth_database
        self._username = username or ""
        self._password = password or ""

        # read-side state: offset into the buffer, the buffer itself,
        # and how many bytes to read ahead per network fetch
        self._read_offset = 0
        self._read_ahead = 16384
        self._read_buffer = BytesIO()
    def __init__(self,
                 sender=None,
                 receiver=None,
                 weight=None,
                 exponent=None,
                 function=Linear,
                 control_signal_params:tc.optional(dict)=None,
                 params=None,
                 name=None,
                 prefs:is_pref_set=None):
        """Pack arguments into params, possibly defer initialization until a
        sender/receiver is available, and delegate to super().__init__."""

        # Assign args to params and functionParams dicts (kwConstants must == arg names)
        params = self._assign_args_to_param_dicts(function=function,
                                                  control_signal_params=control_signal_params,
                                                  params=params)

        # If receiver has not been assigned, defer init to State.instantiate_projection_to_state()
        if (sender is None or sender.context.initialization_status == ContextFlags.DEFERRED_INIT or
                inspect.isclass(receiver) or receiver is None or
                    receiver.context.initialization_status == ContextFlags.DEFERRED_INIT):
            self.context.initialization_status = ContextFlags.DEFERRED_INIT

        # Validate sender (as variable) and params, and assign to variable and paramInstanceDefaults
        # Note: pass name of mechanism (to override assignment of componentName in super.__init__)
        # super(ControlSignal_Base, self).__init__(sender=sender,
        super(ControlProjection, self).__init__(sender=sender,
                                                receiver=receiver,
                                                weight=weight,
                                                exponent=exponent,
                                                function=function,
                                                params=params,
                                                name=name,
                                                prefs=prefs,
                                                context=ContextFlags.CONSTRUCTOR)
def _collapsed_bar(s: optional((int, int))) -> str:
    """Render a (low, high) sample, 0 <= low <= high <= 39, as a single
    box-drawing character picked by the high value; None renders a space."""
    if s is None:
        return " "
    low, high = s
    assert 0 <= low <= high <= 39
    # 40 possible levels map onto 8 block characters, 5 levels apiece
    return box_chars[high // 5]
Example #37
0
 def locate(self, module_name: by_regex("^[A-Za-z0-9_-]{1,128}\\.pyc?$")) -> optional(os_path.isfile):
     """Resolve a module file name to a full path, looking first in the
     cage directory, then in the shared directory; None when absent."""
     if module_name in self._listdir(self._cage_directory):
         return os_path.join(self._cage_directory, module_name)
     if self._shared_directory and module_name in self._listdir(self._shared_directory):
         return os_path.join(self._shared_directory, module_name)
     return None # not found
Example #38
0
def execute_async(target_cage: valid_cage_name, module: valid_module_name,
                  method: valid_method_name, args: tuple, kwargs: dict, *,
                  queue: optional(valid_queue_name) = None, id: optional(str) = None,
                  expires_in: optional(float) = None) -> valid_retry_id:
    """Enqueue an asynchronous (retried) call of module.method(*args, **kwargs)
    on the target cage and return the retry id from the executed transaction.

    queue: retry queue name; None selects the configured default queue.
    id: optional externally supplied id for the retried call.
    expires_in: optional relative expiration time in seconds.
    """

    # if the caller used deprecated syntax pmnc("target_cage:retry"),
    # the name of the queue is set to None meaning "default queue"

    if queue is None: # in which case the configured default value is used
        queue = pmnc.config.get("retry_queue")

    # execution of asynchronous calls is done through
    # a special resource "pmnc" of protocol "retry"

    xa = pmnc.transaction.create()
    xa.pmnc(target_cage, queue = queue, id = id, expires_in = expires_in).\
       __getattr__(module).__getattr__(method)(*args, **kwargs)
    return xa.execute()[0]
Example #39
0
def get_queue(
    name: str,
    *,
    __source_module_name,
    re_len: int,
    pagesize: optional(int) = 32768,
    q_extentsize: optional(int) = 128,
    re_pad: optional(int) = 0x00,
    **db_opts
):
    """Return the named queue database for the calling module, folding the
    explicit record/page options into the database options."""
    db_opts.update(re_len = re_len,
                   pagesize = pagesize,
                   q_extentsize = q_extentsize,
                   re_pad = re_pad)

    module_state = _get_module_state(__source_module_name)
    return module_state.get_queue_db(name, **db_opts)
Example #40
0
    def __init__(self, source_module_name, *,
                 accept: optional(callable) = None,
                 sync_commit: optional(bool) = True,
                 **options):
        """Initialize transaction state: identity, acceptance predicate,
        commit mode, resource bookkeeping and synchronization events."""
        self._source_module_name = source_module_name
        self._accept = accept or self._default_accept
        self._sync_commit = sync_commit
        self._options = options

        # globally unique transaction id: XA-<timestamp>-<random hex>
        timestamp = strftime("%Y%m%d%H%M%S")
        suffix = b2a_hex(urandom(6)).decode("ascii").upper()
        self._xid = "XA-{0:s}-{1:s}".format(timestamp, suffix)
        self._details = None

        self._resources = []
        self._results = InterlockedQueue()
        self._decision = Event()
        self._commit = Event()
        self._transaction_rate_sampler.tick()
Example #41
0
def end_request(success: optional(bool), request: optional(Request) = None):
    """Account for and log the completion of a request.

    success: True for success, False for failure, None when the request is
             being abandoned (counted as a failure in the statistics).
    request: the request being ended; defaults to the current request.
    """

    outcome = "success" if success else "failure"
    request = request or pmnc.request

    # sample the response time and outcome rate for the owning interface
    response_ms = int(request.elapsed * 1000)
    pmnc.performance.sample("interface.{0:s}.response_time.{1:s}".\
                            format(request.interface, outcome), response_ms)
    pmnc.performance.event("interface.{0:s}.response_rate.{1:s}".\
                           format(request.interface, outcome))

    request_count = _request_factory.destroyed() # we don't care exactly which request is being destroyed

    active_requests = ", {0:d} request(s) are still active".format(request_count) if request_count > 0 else ""
    request_description = "{0:s} ".format(request.description) if request is not current_thread()._request else ""
    request_outcome = "ends with {0:s}".format(outcome) if success is not None else "is being abandoned"

    pmnc.log.debug("request {0:s}{1:s}{2:s}".\
                   format(request_description, request_outcome, active_requests))
Example #42
0
 def wait(self, event: optional(Event) = None) -> bool: # respects wall-time timeout, see issue9892
     """Wait until the event is set or this timeout expires.

     Returns True if the event was set, False if the timeout expired.
     Re-reads self.remain after every wake-up so the wall-clock deadline
     is honoured even when event.wait returns early.
     """
     remain, event = self.remain, event or self._never_set
     while remain > 0.0:
         event.wait(remain)
         if event.is_set():
             return True
         remain = self.remain
     return False
Example #43
0
def extract() -> optional((int, dict, dict)):
    """Snapshot the collected performance statistics.

    Returns (base_time, result, stats) where result maps each metric key to
    a pair ([59 one-minute slots], [6 ten-second slots]) aligned to
    base_time, and stats is a copy of the global counters; returns None
    when no statistics have been collected at all.
    """

    result = {}

    with _perf_lock:

        # establish a reference base time between 60s shared stats from 10s shared stats

        if _perf_dump_60s:
            base_time = _perf_dump_60s[-1][0] + 60
        else:
            for x in _perf_dump_10s:
                if x is not None:
                    base_time = x[0] // 60 * 60
                    break
            else:
                return None # no stats at all

        # copy the 60s shared stats

        for t, d in reversed(_perf_dump_60s):
            time_delta = base_time - t
            assert time_delta % 60 == 0
            minutes_back = time_delta // 60
            # keep only the last 59 full minutes before base_time
            if 0 < minutes_back <= 59:
                i = 59 - minutes_back
                for k, v in d.items():
                    result.setdefault(k, ([None] * 59, [None] * 6))[0][i] = v

        # copy the 10s shared stats

        for x in _perf_dump_10s:
            if x is not None:
                t, d = x
                time_delta = t - base_time
                assert time_delta % 10 == 0
                slices_forward = time_delta // 10
                # keep only the 6 ten-second slices at/after base_time
                if 0 <= slices_forward < 6:
                    i = slices_forward
                    for k, v in d.items():
                        result.setdefault(k, ([None] * 59, [None] * 6))[1][i] = v

        # copy the global statistics

        stats = _perf_stats.copy()

    return base_time, result, stats
Example #44
0
    def __init__(self, name: str, *,
                 source_cages: tuple_of(valid_cage_name),
                 request_timeout: optional(float) = None,
                 **kwargs):
        """Store the interface name and source cages and wire the poll/post
        callables, substituting the self-test hooks when under self-test."""
        self._name = name
        self._source_cages = source_cages

        self._request_timeout = request_timeout or \
                                pmnc.config_interfaces.get("request_timeout") # this is now static

        if pmnc.request.self_test == __name__: # self-test
            self._process_revrpc_request = kwargs["process_revrpc_request"]
            self._poll = kwargs["poll"]
            self._post = kwargs["post"]
        else:
            self._poll = lambda cage: pmnc(cage).reverse_call.poll()
            self._post = lambda cage, request_id, response: pmnc(cage).reverse_call.post(request_id, response)
Example #45
0
def accept(module: str, method: str, args: tuple, kwargs: dict, *,
           source_cage_id: valid_retry_id, retry_deadline: optional(int),
           **options):
    """Accept a retried call from another cage and re-enqueue it locally.

    The original unique id of the retried call is reused to prevent
    duplicate invocations, and the deadline is inherited; the actual
    execution attempts are deferred and governed by the local cage's
    retry policy (the current attempt starts again at 1).
    """

    # translate the inherited absolute deadline into a relative expiration
    expires_in = max(retry_deadline - time(), 0.0) if retry_deadline is not None else None

    return pmnc(queue = pmnc.config.get("retry_queue"),
                id = source_cage_id, expires_in = expires_in).\
                __getattr__(module).__getattr__(method)(*args, **kwargs)
def _expanded_bar(s: optional((int, int))) -> [str, str, str, str, str]:
    """Render a (low, high) sample, 0 <= low <= high <= 39, as five cells.

    Each cell covers 8 levels: cells below low are blank, cells between low
    and high are full blocks (the low cell marked "~X" when low falls inside
    it), the high cell shows a partial block; None renders five blanks.
    """

    if s is None:
        return [" "] * 5

    low, high = s
    assert 0 <= low <= high <= 39

    # split each bound into its cell index and the level within that cell
    low_idx, low_level = divmod(low, 8)
    high_idx, high_level = divmod(high, 8)

    result = [" "] * low_idx
    if low_idx < high_idx:
        # mark the cell where the range starts, then fill the interior
        result.append(("~" + box_chars[low_level - 1]) if low_level > 0 else full_block)
        result.extend([full_block] * (high_idx - low_idx - 1))

    result.append(box_chars[high_level])
    result.extend([" "] * (4 - high_idx))

    return result
Example #47
0
def run(*, required_dirs: optional(tuple_of(str)) = ()):
    """Run the self-test for the main module in a temporary cage copy.

    required_dirs: extra directories to include in the temporary copy.
    The pmnc instance and the temporary copy are always torn down, even
    when the test fails.
    """

    current_thread().name = "self_test"

    _log("***** STARTING SELF-TEST FOR MODULE {0:s} *****".format(main_module_name.upper()))

    test_cages_dir = _create_temp_cage_copy(required_dirs = required_dirs)
    try:
        pmnc = _start_pmnc(test_cages_dir)
        try:
            try:
                current_thread()._pmnc = pmnc # to be used in active_interface
                assert pmnc.request.self_test == main_module_name
                pmnc.__getattr__(main_module_name).self_test()
            except:
                # note: the module name argument was previously passed but
                # unused by the format string; only the exception is shown
                _log("***** FAILURE: {0:s}".format(exc_string()))
            else:
                _log("***** SUCCESS, BUT EXAMINE THE LOG FOR UNEXPECTED ERRORS *****")
        finally:
            _stop_pmnc(pmnc)
    finally:
        _remove_temp_cage_copy(test_cages_dir)
 def foo(*, k: tc.optional(lambda s: s != "") = None):
     """Return k unchanged; the annotation accepts None or any value for
     which the lambda predicate (s != "") holds."""
     return k
 def foo(x: tc.optional(tc.enum(1, 2)) = 2):
     """Return x unchanged; the annotation permits 1, 2 or None."""
     return x
 def foo(*, k: tc.optional(list) = []):
     """Append the current length of k to k and return it.

     NOTE(review): the mutable default [] is shared across calls, so
     successive no-arg calls return a growing list — presumably a
     deliberate fixture here; confirm before reusing this pattern.
     """
     k.append(len(k))
     return k
 def foo(*, a: tc.optional(str) = "a"):
     """Return a unchanged; the annotation permits a str or None."""
     return a
 def foo(a: (int,str) = (1,"!"), *, k: tc.optional(()) = ()) -> (str, ()):
     """Return (a[1], k): the str half of the (int, str) pair plus the
     keyword-only tuple k."""
     return a[1], k
 def foo(a: [] = [], *, k: tc.optional([]) = None) -> ([], tc.optional([])):
     """Return the pair (a, k) unchanged; k may be a list or None."""
     return a, k
def test_map_of_with_optional():
    """map_of(int, optional(str)) accepts None values but rejects None keys,
    through both the call and the .check interfaces."""
    good = { 1: "foo", 2: None }
    bad = { None: "foo", 2: None }
    assert tc.map_of(int, tc.optional(str))(good) and \
           tc.map_of(int, tc.optional(str)).check(good)
    assert not tc.map_of(int, tc.optional(str))(bad) and \
           not tc.map_of(int, tc.optional(str)).check(bad)
def test_seq_of_with_optional():
    """seq_of(optional(re("^foo$"))) accepts sequences of "foo"/None and
    rejects any other string, via both the call and .check interfaces."""
    assert tc.seq_of(tc.optional(tc.re("^foo$")))(["foo", None, "foo"]) and \
           tc.seq_of(tc.optional(tc.re("^foo$"))).check(["foo", None, "foo"])
    # NOTE(review): the two negative cases deliberately(?) use different
    # failing inputs ("123" vs "1234") — confirm the asymmetry is intended
    assert not tc.seq_of(tc.optional(tc.re("^foo$")))(["123", None, "foo"]) and \
           not tc.seq_of(tc.optional(tc.re("^foo$"))).check(["foo", None, "1234"])
 def foo(*, k: tc.optional([[[[lambda x: x % 3 == 1]]]]) = [[[[4]]]]):
     """Return the innermost element of the four-level nested list k; the
     annotation requires that element to satisfy x % 3 == 1 (or k be None)."""
     return k[0][0][0][0]
 def foo(x = None) -> tc.optional(not_none): # note how optional overrides the not_none
     """Return x unchanged; the return annotation still admits None because
     optional(...) overrides the inner not_none check."""
     return x
 def foo(*, k: tc.optional(not_none) = None): # note how optional overrides the not_none
     """Return k unchanged; None is accepted because optional(...) overrides
     the inner not_none check."""
     return k
 def foo(*, b: tc.optional(bool) = None) -> bool:
     """Return b unchanged; b may be a bool or None, but the return
     annotation requires a bool."""
     return b