import numpy as np
import pytest

# Note: softmax, sigm, OneHotEncoding, the cost functions, DataSet and
# DataCollection are assumed to be imported from the surrounding package.


def test_softmax():
    x = np.random.randn(3, 4, 5)
    s = softmax(x, axis=1)
    assert s.shape == (3, 4, 5)
    assert (s > 0).all() and (s < 1).all()
    assert np.allclose(np.sum(s, axis=1), 1)
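# The test above assumes a `softmax(x, axis)` helper from the surrounding
# package.  For reference, a minimal numerically stable sketch of what it is
# expected to compute (the actual implementation may differ):
def softmax_sketch(x, axis):
    shifted = x - np.max(x, axis=axis, keepdims=True)  # Shifting by the max does not change the result (softmax is shift-invariant) but avoids overflow.
    e = np.exp(shifted)
    return e / np.sum(e, axis=axis, keepdims=True)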
def test_cost_functions():
    actual = np.random.rand(100, 3)
    target = np.random.randint(3, size=100)
    onehot_target = OneHotEncoding(n_classes=3)(target)

    assert np.allclose(
        percent_correct.compile()(actual, target),
        100 * (np.argmax(actual, axis=1) == target).mean())
    assert np.allclose(
        mean_squared_error.compile()(actual, onehot_target),
        (((actual - onehot_target)**2).sum(axis=1)).mean())

    nll_func = negative_log_likelihood.compile()
    # The NLL cost expects normalized probabilities, so raw values should fail.
    with pytest.raises(AssertionError):
        nll_func(actual, target)

    softmax_actual = softmax(actual, axis=1)
    assert np.allclose(
        nll_func(softmax_actual, target),
        -np.log(softmax_actual[np.arange(actual.shape[0]), target]).mean())
    assert np.allclose(
        softmax_negative_log_likelihood.compile()(actual, target),
        nll_func(softmax_actual, target))

    normalized_actual = actual / actual.sum(axis=1, keepdims=True)
    assert np.allclose(
        normalized_negative_log_likelihood.compile()(normalized_actual, target),
        nll_func(normalized_actual, target))
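# For reference, the quantity the compiled negative-log-likelihood is checked
# against in the test above, written as plain numpy (a sketch;
# `negative_log_likelihood` itself is assumed to be a compilable cost defined
# elsewhere in the package):
def nll_reference(probs, labels):
    # Mean negative log-probability assigned to the correct class of each sample.
    return -np.log(probs[np.arange(probs.shape[0]), labels]).mean()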
def activation_function(data, function_name):
    if function_name == 'relu':
        return np.maximum(0, data)
    elif function_name == 'linear':
        return data
    elif function_name == 'softmax':
        return softmax(data, axis=-1)
    elif function_name in ('sigm', 'sigmoid'):
        return 1. / (1 + np.exp(-data))
    else:
        raise Exception('No Nonlinearity "{}". Add it.'.format(function_name))
def get_synthethic_linear_dataset(noise_level=0.1, n_input_dims=20, n_output_dims=4, n_training_samples=1000,
        n_test_samples=200, nonlinearity=None, offset_mag=0, seed=8158):
    """
    A synthetic dataset that can be used for testing generalized linear models.

    :param noise_level: Standard deviation of the Gaussian noise added to the targets
    :param n_input_dims: Number of input dimensions (0 for a singleton input dimension)
    :param n_output_dims: Number of output dimensions (0 for a singleton output dimension)
    :param n_training_samples: Number of training samples
    :param n_test_samples: Number of test samples
    :param nonlinearity: One of 'softmax', 'sigmoid', 'argmax', None, or a callable applied to the targets
    :param offset_mag: Magnitude of a random constant offset added to the targets
    :param seed: Random seed
    :return: A DataSet with training and test sets
    """
    input_singleton = n_input_dims == 0
    if input_singleton:
        n_input_dims = 1

    output_singleton = n_output_dims == 0
    if output_singleton:  # Unfortunately we have to deal with the inconsistencies in numpy's handling of singleton dimensions.
        n_output_dims = 1

    rng = np.random.RandomState(seed)
    w = rng.randn(n_input_dims, n_output_dims) / np.sqrt(n_input_dims)
    input_data = rng.randn(n_training_samples + n_test_samples, n_input_dims)
    target_data = np.dot(input_data, w) + offset_mag * rng.randn(n_output_dims) \
        + noise_level * rng.randn(n_training_samples + n_test_samples, n_output_dims)

    if nonlinearity == 'softmax':
        target_data = softmax(target_data, axis=1)
    elif nonlinearity == 'sigmoid':
        target_data = sigm(target_data)
    elif nonlinearity == 'argmax':
        target_data = np.argmax(target_data, axis=1)
    elif nonlinearity is None:
        pass
    else:
        assert callable(nonlinearity), 'Unknown nonlinearity: {}'.format(nonlinearity)
        target_data = nonlinearity(target_data)

    if input_singleton:
        input_data = input_data[:, 0]
    if output_singleton:
        target_data = target_data[:, 0]

    return DataSet(
        training_set=DataCollection(input_data[:n_training_samples], target_data[:n_training_samples]),
        test_set=DataCollection(input_data[n_training_samples:], target_data[n_training_samples:]),
    )
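# Example usage (a sketch; the DataSet returned above is assumed to carry the
# training/test collections it was constructed with):
dataset = get_synthethic_linear_dataset(
    n_input_dims=5, n_output_dims=2, n_training_samples=100, n_test_samples=20)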
def softmax_categorical_xe(actual, target):
    """
    Compute the mean categorical cross-entropy between the softmax of the actual output and the target labels.

    :param actual: An (n_samples, n_dims) array of pre-softmax outputs (logits)
    :param target: An (n_samples, ) integer array of labels, or an (n_samples, n_dims) one-hot array
    :return: The mean cross-entropy over samples
    """
    if target.ndim == 1:
        assert target.dtype == int and np.max(target) < actual.shape[1]
    elif target.ndim == 2:
        # A one-hot target: convert to integer labels.
        assert np.all(target.sum(axis=1) == 1) and np.all(np.max(target, axis=1) == 1)
        target = np.argmax(target, axis=1)
    else:
        raise Exception("Don't know how to interpret a {}-D target".format(target.ndim))
    return -np.log(softmax(actual, axis=1)[np.arange(actual.shape[0]), target]).mean()
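# Example usage (a sketch): mean cross-entropy of random logits against random
# integer labels.
logits = np.random.randn(10, 4)
labels = np.random.randint(4, size=10)
xe = softmax_categorical_xe(logits, labels)
assert xe > 0  # Cross-entropy of a non-degenerate softmax is strictly positive.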
def activation_function(data, function_name):
    if function_name == 'relu':
        return np.maximum(0, data)
    elif function_name == 'linear':
        return data
    elif function_name == 'softmax':
        return softmax(data, axis=-1)
    elif function_name == 'softplus':
        return np.log(np.exp(data) + 1)
    elif function_name in ('sigm', 'sigmoid'):
        return 1. / (1 + np.exp(-data))
    elif function_name == 'tanh':
        return np.tanh(data)
    else:
        raise Exception('No Nonlinearity "{}". Add it.'.format(function_name))
def activation_function(data, function_name):
    if function_name == 'relu':
        return np.maximum(0, data)
    elif function_name == 'linear':
        return data
    elif function_name == 'softmax':
        return softmax(data, axis=-1)
    elif function_name == 'softplus':
        return np.log(np.exp(data) + 1)
    elif function_name in ('sigm', 'sigmoid'):
        return 1. / (1 + np.exp(-data))
    elif function_name == 'tanh':
        return np.tanh(data)
    elif function_name == 'clip':
        return np.clip(data, 0, 1)
    else:
        raise Exception('No Nonlinearity "{}". Add it.'.format(function_name))
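# Example usage (a sketch): each name selects an elementwise nonlinearity,
# except 'softmax', which normalizes over the last axis.
h = np.random.randn(8, 3)
assert np.allclose(activation_function(h, 'softmax').sum(axis=-1), 1)
assert (activation_function(h, 'relu') >= 0).all()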
def predict(self, x):
    """
    :param x: A (n_samples, n_in) input array
    :return: A (n_samples, n_out) array of output probabilities for each sample.
    """
    return softmax(x.dot(self.w), axis=1)
def predict(self, x): """ :param x: An (n_samples, n_inputs) input :return: An (n_samples, n_classes) class probability """ return softmax(x.dot(self.w), axis=1)