def __init__(self, nIn, nOut, weights=None, activation='softmax',
             isClassifierLayer=True):
    """Initialize a fully-connected layer.

    Parameters
    ----------
    nIn : int
        Number of input units (bias excluded).
    nOut : int
        Number of output units.
    weights : ndarray of shape (nOut, nIn + 1), optional
        Pre-trained weight matrix; random uniform weights in
        [-0.5, 0.5) are drawn when omitted.
    activation : str
        Name of the activation function, resolved via ``Activation``.
    isClassifierLayer : bool
        Whether this layer is the network's output/classifier layer.
    """
    # Get activation function (and its derivative) from its string name.
    # Notice the functional programming paradigms of Python + Numpy.
    self.activationString = activation
    self.activation = Activation.getActivation(self.activationString)
    self.activationPrime = Activation.getDerivative(self.activationString)

    self.nIn = nIn
    self.nOut = nOut

    # Input buffer with an extra bias slot. np.zeros (not np.ndarray)
    # so no uninitialized memory can leak into a forward pass.
    self.input = np.zeros((nIn + 1, 1))
    self.input[0] = 1  # bias unit is always 1
    self.output = np.zeros((nOut, 1))
    self.delta = np.zeros((nOut, 1))

    # You can have better initialization here.
    # wij means the weight from Input(j) to the Output(i).
    if weights is None:
        rns = np.random.RandomState(int(time.time()))
        self.weights = rns.uniform(size=(nOut, nIn + 1)) - 0.5
    else:
        self.weights = weights

    self.isClassifierLayer = isClassifierLayer

    # Some handy properties of the layer.
    self.size = self.nOut
    self.shape = self.weights.shape
def __init__(self, nIn, nOut, weights=None, activation='sigmoid',
             isClassifierLayer=False):
    """Initialize a fully-connected layer.

    Parameters
    ----------
    nIn : int
        Number of input units (bias excluded).
    nOut : int
        Number of output units.
    weights : ndarray of shape (nIn + 1, nOut), optional
        Pre-trained weight matrix; random uniform weights in
        [-0.5, 0.5) are drawn when omitted.
    activation : str
        Name of the activation function, resolved via ``Activation``.
    isClassifierLayer : bool
        Whether this layer is the network's output/classifier layer.

    Raises
    ------
    ValueError
        If ``weights`` is given but does not have shape (nIn + 1, nOut).
    """
    # Get activation function (and its derivative) from its string name.
    self.activationString = activation
    self.activation = Activation.getActivation(self.activationString)
    self.activationDerivative = Activation.getDerivative(
        self.activationString)

    self.nIn = nIn
    self.nOut = nOut

    # Input buffer with an extra bias slot. np.zeros (not np.ndarray)
    # so no uninitialized memory can leak into a forward pass.
    self.inp = np.zeros((nIn + 1, 1))
    # self.inp[0] = 1
    self.outp = np.zeros((nOut, 1))
    self.deltas = np.zeros((nOut, 1))

    # You can have better initialization here.
    if weights is None:
        rns = np.random.RandomState(int(time.time()))
        self.weights = rns.uniform(size=(nIn + 1, nOut)) - 0.5
    else:
        # Validate explicitly: assert statements are stripped under
        # ``python -O`` and must not guard input validation.
        if weights.shape != (nIn + 1, nOut):
            raise ValueError(
                'weights must have shape {}, got {}'.format(
                    (nIn + 1, nOut), weights.shape))
        self.weights = weights

    self.isClassifierLayer = isClassifierLayer

    # Some handy properties of the layer.
    self.size = self.nOut
    self.shape = self.weights.shape
def __init__(self, train, valid, test, learningRate=0.01, epochs=50,
             activation='sigmoid', error='mse'):
    """Initialize the trainer with data sets and hyperparameters.

    Parameters
    ----------
    train, valid, test
        Data set objects; ``train.input`` must be a 2-D array whose
        second dimension is the feature count.
    learningRate : float
        Step size for gradient updates.
    epochs : int
        Number of training passes over the training set.
    activation : str
        Name of the activation function, resolved via ``Activation``.
    error : str
        Key of the error function to instantiate from ``erf``.

    Raises
    ------
    ValueError
        If ``error`` does not name a known error function.
    """
    self.learningRate = learningRate
    self.epochs = epochs

    self.trainingSet = train
    self.validationSet = valid
    self.testSet = test

    # Initialize the weight vector with small random values between
    # -0.3 and 0.3 to encourage sigmoid function learning (keeps the
    # sigmoid in its high-gradient region at the start of training).
    self.weight = (np.random.rand(self.trainingSet.input.shape[1])
                   * 0.6 - 0.3)

    self.activation = Activation.getActivation(activation)
    self.activationPrime = Activation.getDerivative(activation)
    # Capitalized form kept for display/logging consistency.
    self.activationString = activation[0].upper() + activation[1:]

    self.erString = error

    # Dispatch table replaces the if/elif chain; classes are
    # instantiated only after a successful lookup.
    errorFunctions = {
        'absolute': erf.AbsoluteError,
        'different': erf.DifferentError,
        'mse': erf.MeanSquaredError,
        'sse': erf.SumSquaredError,
        'bce': erf.BinaryCrossEntropyError,
        'crossentropy': erf.CrossEntropyError,
    }
    try:
        self.erf = errorFunctions[error]()
    except KeyError:
        # Bug fix: original message was missing the space before
        # "not available".
        raise ValueError('Cannot instantiate the requested '
                         'error function: ' + error + ' not available')