def _validate_noise(self, noise, var):
    """Check that a noise specification is usable for this Mechanism.

    Accepts: a control spec; a float/int; a callable; or a list/array whose
    shape is compatible with ``var`` and whose elements are floats/ints or
    callables.  Raises MechanismError for anything else.
    """
    # Case 1: list or array of noise values
    if isinstance(noise, (np.ndarray, list)):
        # A single-element noise spec is accepted without further checks
        if len(noise) == 1:
            return
        # Multi-element spec must be shape-compatible with the variable
        if len(noise) > 1 and not iscompatible(np.atleast_2d(noise), var):
            raise MechanismError(
                "Noise parameter ({}) does not match default variable ({}). Noise parameter of {} must be specified"
                " as a float, a function, or an array of the appropriate shape ({})."
                .format(noise, self.instance_defaults.variable, self.name,
                        np.shape(np.array(var))))
        # Shape is OK (or spec is empty): each element must be numeric or callable
        for item in noise:
            if isinstance(item, (float, int)) or callable(item):
                continue
            raise MechanismError(
                "The elements of a noise list or array must be floats or functions. {} is not a valid noise"
                " element for {}".format(item, self.name))
        return

    # Case 2: a control specification is always accepted
    if _is_control_spec(noise):
        return

    # Case 3: otherwise, must be a float, int or function
    if not isinstance(noise, (float, int)) and not callable(noise):
        raise MechanismError(
            "Noise parameter ({}) for {} must be a float, "
            "function, or array/list of these.".format(noise, self.name))
def _instantiate_receiver(self, context=None):
    """Validate that receiver has been assigned and is compatible with the output of function

    Notes:
    * _validate_params verifies that receiver is a parameterState for the matrix parameter of a MappingProjection.
    * _super()._instantiate_receiver verifies that the projection has not already been assigned to the receiver.

    """
    super()._instantiate_receiver(context=context)

    # Insure that the learning_signal is compatible with the receiver's weight matrix
    if not iscompatible(self.value, self.receiver.instance_defaults.variable):
        raise LearningProjectionError(
            "The learning_signal of {} ({}) is not compatible with the matrix of "
            "the MappingProjection ({}) to which it is being assigned ({})"
            .format(self.name, self.value, self.receiver.value,
                    self.receiver.owner.name))

    # Insure that learning_signal has the same shape as the receiver's weight matrix
    # (np.array(...).shape raises TypeError for non-arrayable values; fall back to 1)
    try:
        receiver_weight_matrix_shape = np.array(self.receiver.value).shape
    except TypeError:
        receiver_weight_matrix_shape = 1
    try:
        learning_signal_shape = np.array(self.value).shape
    except TypeError:
        learning_signal_shape = 1

    # FIX: SHOULD TEST WHETHER IT CAN BE USED, NOT WHETHER IT IS THE SAME SHAPE
    # # MODIFIED 3/8/17 OLD:
    # if receiver_weight_matrix_shape != learning_signal_shape:
    #     raise ProjectionError("Shape ({}) of learing_signal matrix for {} from {}"
    #                           " must match shape of the weight matrix ({}) for the receiver {}".
    #                           format(learning_signal_shape,
    #                                  self.name,
    #                                  self.sender.name,
    #                                  receiver_weight_matrix_shape,
    #                                  self.receiver.owner.name))
    # MODIFIED 3/8/17 END

    # Resolve the mechanism sending the learning signal and the projection being trained
    learning_mechanism = self.sender.owner
    learned_projection = self.receiver.owner

    # Check if learning_mechanism receives a projection from an ObjectiveMechanism;
    #    if it does, assign it to the objective_mechanism attribute for the projection being learned
    try:
        candidate_objective_mech = learning_mechanism.input_states[
            ERROR_SIGNAL].path_afferents[0].sender.owner
        if isinstance(candidate_objective_mech, ObjectiveMechanism
                      ) and candidate_objective_mech._role is LEARNING:
            learned_projection.objective_mechanism = candidate_objective_mech
    except TypeError:
        # learning_mechanism does not receive from an ObjectiveMechanism
        #    (e.g., AutoAssociativeLearningMechanism, which receives straight from a ProcessingMechanism)
        pass
    # Record the learning wiring on the projection being learned
    learned_projection.learning_mechanism = learning_mechanism
    learned_projection.has_learning_projection = True
def validate_setting(self, candidate_setting, reference_setting, pref_ivar_name):
    """Validate candidate_setting by checking against reference_setting and, if a log_entry, its type

    :param candidate_setting:
    :param reference_setting:
    :return: True if the candidate is type-compatible with the reference, else False
    """
    # NOTE(review): historical log-pref validation (kpLogPref / validate_log) was
    # disabled here; only the type-compatibility check remains active.
    return iscompatible(candidate_setting, reference_setting,
                        **{kwCompatibilityType: Enum})
def _validate_params(self, request_set, target_set=None, context=None):
    """If sample and target values are specified, validate that they are compatible

    Two specification routes are checked:
    * INPUT_STATES in request_set: must be exactly two state-spec dicts
      (sample and target) whose variables have matching lengths;
    * SAMPLE and TARGET in request_set: their resolved values must be
      numerically compatible and of equal length.

    Raises ComparatorMechanismError on any violation, then defers the
    remaining validation to super()._validate_params.
    """
    if INPUT_STATES in request_set and request_set[INPUT_STATES] is not None:
        input_states = request_set[INPUT_STATES]

        # Validate that there are exactly two input_states (for sample and target)
        num_input_states = len(input_states)
        if num_input_states != 2:
            raise ComparatorMechanismError(
                "{} arg is specified for {} ({}), so it must have exactly 2 items, "
                "one each for {} and {}".format(INPUT_STATES,
                                                self.__class__.__name__,
                                                num_input_states, SAMPLE,
                                                TARGET))

        # Validate that input_states are specified as dicts
        if not all(
                isinstance(input_state, dict)
                for input_state in input_states):
            raise ComparatorMechanismError(
                "PROGRAM ERROR: all items in input_state args must be converted to dicts"
                " by calling State._parse_state_spec() before calling super().__init__"
            )

        # Validate length of variable for sample = target
        if VARIABLE in input_states[0]:
            # input_states arg specified in standard state specification dict format
            lengths = [
                len(input_state[VARIABLE]) for input_state in input_states
            ]
        else:
            # input_states arg specified in {<STATE_NAME>:<STATE SPECIFICATION DICT>} format
            lengths = [
                len(list(input_state_dict.values())[0][VARIABLE])
                for input_state_dict in input_states
            ]
        if lengths[0] != lengths[1]:
            raise ComparatorMechanismError(
                "Length of value specified for {} InputState of {} ({}) must be "
                "same as length of value specified for {} ({})".format(
                    SAMPLE, self.__class__.__name__, lengths[0], TARGET,
                    lengths[1]))

    elif SAMPLE in request_set and TARGET in request_set:
        # Resolve each spec (InputState, Mechanism, or value) to a comparable value
        sample = request_set[SAMPLE]
        if isinstance(sample, InputState):
            sample_value = sample.value
        elif isinstance(sample, Mechanism):
            sample_value = sample.input_value[0]
        elif is_value_spec(sample):
            sample_value = sample
        else:
            sample_value = None

        target = request_set[TARGET]
        if isinstance(target, InputState):
            target_value = target.value
        elif isinstance(target, Mechanism):
            target_value = target.input_value[0]
        elif is_value_spec(target):
            target_value = target
        else:
            target_value = None

        # BUG FIX: compare the resolved values (sample_value/target_value), not the
        # raw specs -- the raw specs may be InputStates or Mechanisms, for which
        # iscompatible/len are meaningless; previously the resolved values were
        # computed but never used.
        if sample_value is not None and target_value is not None:
            if not iscompatible(
                    sample_value, target_value, **{
                        kwCompatibilityLength: True,
                        kwCompatibilityNumeric: True
                    }):
                raise ComparatorMechanismError(
                    "The length of the sample ({}) must be the same as for the target ({}) "
                    "for {} {}".format(len(sample_value), len(target_value),
                                       self.__class__.__name__, self.name))

    super()._validate_params(request_set=request_set,
                             target_set=target_set,
                             context=context)
def _validate_params(self, request_set, target_set=None, context=None):
    """Validate FUNCTION and Mechanism params

    Checks, after super()._validate_params:
    * FUNCTION is a TransferFunction or NormalizingFunction (instance,
      bound method, or class);
    * INITIAL_VALUE matches the shape of the Mechanism's variable;
    * NOISE is valid (delegated to _validate_noise);
    * TIME_CONSTANT is a float in [0, 1] or None;
    * CLIP is a 2-tuple of numbers with clip[0] < clip[1].

    Raises TransferError for any violation.
    """
    super()._validate_params(request_set=request_set,
                             target_set=target_set,
                             context=context)

    # Validate FUNCTION
    if FUNCTION in target_set:
        transfer_function = target_set[FUNCTION]
        # FUNCTION is a Function
        if isinstance(transfer_function, Component):
            transfer_function_class = transfer_function.__class__
            transfer_function_name = transfer_function.__class__.__name__
        # FUNCTION is a function or method
        # NOTE(review): __self__ only exists on bound methods; a plain function
        #   here would raise AttributeError -- confirm callers never pass one
        elif isinstance(transfer_function, (function_type, method_type)):
            transfer_function_class = transfer_function.__self__.__class__
            transfer_function_name = transfer_function.__self__.__class__.__name__
        # FUNCTION is a class
        elif inspect.isclass(transfer_function):
            transfer_function_class = transfer_function
            transfer_function_name = transfer_function.__name__

        if (transfer_function_class.componentType is not TRANSFER_FUNCTION_TYPE
                and transfer_function_class.componentType is not NORMALIZING_FUNCTION_TYPE):
            raise TransferError(
                "Function {} specified as FUNCTION param of {} must be a {}"
                .format(transfer_function_name, self.name,
                        TRANSFER_FUNCTION_TYPE))

    # Validate INITIAL_VALUE
    if INITIAL_VALUE in target_set:
        initial_value = target_set[INITIAL_VALUE]
        if initial_value is not None:
            if not iscompatible(initial_value,
                                self.instance_defaults.variable):
                # BUG FIX: a leftover debug `raise Exception(...)` preceded this
                # raise, making the intended TransferError unreachable; removed.
                raise TransferError(
                    "The format of the initial_value parameter for {} ({}) must match its input ({})"
                    .format(
                        append_type_to_name(self),
                        initial_value,
                        self.instance_defaults.variable[0],
                    ))

    # FIX: SHOULD THIS (AND TIME_CONSTANT) JUST BE VALIDATED BY INTEGRATOR FUNCTION NOW THAT THEY ARE PROPERTIES??
    # Validate NOISE:
    if NOISE in target_set:
        self._validate_noise(target_set[NOISE],
                             self.instance_defaults.variable)

    # Validate TIME_CONSTANT: must be None or a float in [0, 1]
    if TIME_CONSTANT in target_set:
        time_constant = target_set[TIME_CONSTANT]
        if (not (isinstance(time_constant, float)
                 and 0 <= time_constant <= 1)) and (time_constant is not None):
            raise TransferError(
                "time_constant parameter ({}) for {} must be a float between 0 and 1"
                .format(time_constant, self.name))

    # Validate CLIP: must be a 2-tuple of numbers in increasing order
    if CLIP in target_set:
        clip = target_set[CLIP]
        if clip:
            if not (isinstance(clip, tuple) and len(clip) == 2
                    and all(isinstance(i, numbers.Number) for i in clip)):
                raise TransferError(
                    "clip parameter ({}) for {} must be a tuple with two numbers"
                    .format(clip, self.name))
            if not clip[0] < clip[1]:
                raise TransferError(
                    "The first item of the clip parameter ({}) must be less than the second"
                    .format(clip))