def _check_params(self, parameters): """Checks for mistakes in 'parameters' Args : parameters: dict, parameters to be checked Raises : ValueError: if any parameter is not a valid argument for the target function or the target function is not defined TypeError: if argument parameters is not iterable """ a_valid_fn = [] if self.target_fn is None: if callable(self): a_valid_fn.append(self.__call__) else: raise TypeError( 'invalid argument: tested object is not callable,\ please provide a valid target_fn') elif isinstance(self.target_fn, types.FunctionType) \ or isinstance(self.target_fn, types.MethodType): a_valid_fn.append(self.target_fn) else: a_valid_fn.append(self.target_fn.__call__) if not isinstance(parameters, str): for p in parameters: for fn in a_valid_fn: if has_arg(fn, p): pass else: raise ValueError( '{} is not a valid parameter'.format(p)) else: raise TypeError('invalid argument: list or dictionnary expected')
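# `_check_params` and `filter_params` both introspect the target's signature
# via `has_arg` (imported above from keras.utils.generic_utils). A minimal
# sketch of the contract they assume, using a hypothetical `crop` function:
#
#     >>> def crop(image, margin=0):
#     ...     return image
#     >>> has_arg(crop, 'margin')    # declared keyword argument
#     True
#     >>> has_arg(crop, 'sigma')     # unknown name
#     False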
def filter_params(self, fn, override=None): """Filters `target_params` and return those in `fn`'s arguments. Args: fn : arbitrary function override: dict, values to override target_params Returns: result : dict, dictionary containing variables in both target_params and fn's arguments. """ override = override or {} result = {} for name, value in self.target_params.items(): if has_arg(fn, name): result.update({name: value}) result.update(override) return result
def layer_test(layer_cls, kwargs=None, input_shape=None, input_dtype=None,
               input_data=None, expected_output=None,
               expected_output_dtype=None, fixed_batch_size=False):
    """Test routine for a layer with a single input tensor
    and single output tensor.
    """
    kwargs = kwargs or {}

    # generate input data
    if input_data is None:
        assert input_shape
        if not input_dtype:
            input_dtype = K.floatx()
        input_data_shape = list(input_shape)
        for i, e in enumerate(input_data_shape):
            if e is None:
                input_data_shape[i] = np.random.randint(1, 4)
        input_data = (10 * np.random.random(input_data_shape))
        input_data = input_data.astype(input_dtype)
    else:
        if input_shape is None:
            input_shape = input_data.shape
        if input_dtype is None:
            input_dtype = input_data.dtype
    if expected_output_dtype is None:
        expected_output_dtype = input_dtype

    # instantiation
    layer = layer_cls(**kwargs)

    # test get_weights, set_weights at layer level
    weights = layer.get_weights()
    layer.set_weights(weights)

    try:
        expected_output_shape = layer.compute_output_shape(input_shape)
    except AttributeError:
        # older Keras versions exposed this as a private method
        expected_output_shape = layer._compute_output_shape(input_shape)

    # test in functional API
    if fixed_batch_size:
        x = Input(batch_shape=input_shape, dtype=input_dtype)
    else:
        x = Input(shape=input_shape[1:], dtype=input_dtype)
    y = layer(x)
    assert K.dtype(y) == expected_output_dtype

    # check with the functional API
    model = Model(x, y)
    actual_output = model.predict(input_data)
    actual_output_shape = actual_output.shape
    for expected_dim, actual_dim in zip(expected_output_shape,
                                        actual_output_shape):
        if expected_dim is not None:
            assert expected_dim == actual_dim
    if expected_output is not None:
        assert_allclose(actual_output, expected_output, rtol=1e-3)

    # test serialization, weight setting at model level
    model_config = model.get_config()
    recovered_model = model.__class__.from_config(model_config)
    if model.weights:
        weights = model.get_weights()
        recovered_model.set_weights(weights)
        _output = recovered_model.predict(input_data)
        assert_allclose(_output, actual_output, rtol=1e-3)

    # test training mode (e.g. useful when the layer has a
    # different behavior at training and testing time).
    if has_arg(layer.call, 'training'):
        model.compile('rmsprop', 'mse')
        model.train_on_batch(input_data, actual_output)

    # test instantiation from layer config
    layer_config = layer.get_config()
    layer_config['batch_input_shape'] = input_shape
    layer = layer.__class__.from_config(layer_config)

    # for further checks in the caller function
    return actual_output
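# Usage illustration (assumes a working Keras installation; `Dense` is the
# standard keras.layers.Dense layer):
#
#     >>> from keras.layers import Dense
#     >>> out = layer_test(Dense, kwargs={'units': 3}, input_shape=(2, 4))
#     >>> out.shape
#     (2, 3)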