Code Example #1
    def __init__(self, nIn, nOut, weights=None,
                 activation='softmax', isClassifierLayer=True):

        # Get activation function from string
        # Notice the functional programming paradigms of Python + Numpy
        self.activationString = activation
        self.activation = Activation.getActivation(self.activationString)
        self.activationPrime = Activation.getDerivative(self.activationString)

        self.nIn = nIn
        self.nOut = nOut

        # Adding bias
        self.input = np.ndarray((nIn+1, 1))
        self.input[0] = 1
        self.output = np.ndarray((nOut, 1))
        self.delta = np.zeros((nOut, 1))

        # You can have better initialization here
        # wij means the weight from Input(j) to the Output(i)
        if weights is None:
            rns = np.random.RandomState(int(time.time()))
            self.weights = rns.uniform(size=(nOut, nIn + 1)) - 0.5
        else:
            self.weights = weights

        self.isClassifierLayer = isClassifierLayer

        # Some handy properties of the layers
        self.size = self.nOut
        self.shape = self.weights.shape
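
The comment "You can have better initialization here" invites an improvement. A minimal sketch of Glorot/Xavier uniform initialization for the same (nOut, nIn + 1) weight layout, assuming NumPy imported as np; the helper name is illustrative, not part of the project:

import numpy as np

def xavier_uniform(n_out, n_in):
    # Glorot & Bengio (2010): draw from U(-limit, limit) with
    # limit = sqrt(6 / (fan_in + fan_out)), which keeps activation
    # variance roughly constant across layers.
    limit = np.sqrt(6.0 / (n_in + n_out))
    # The extra column holds the bias weight, matching the layout above.
    return np.random.uniform(-limit, limit, size=(n_out, n_in + 1))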
Code Example #2
    def __init__(self,
                 nIn,
                 nOut,
                 weights=None,
                 activation='sigmoid',
                 isClassifierLayer=False):

        # Get activation function from string
        self.activationString = activation
        self.activation = Activation.getActivation(self.activationString)
        self.activationDerivative = Activation.getDerivative(
            self.activationString)

        self.nIn = nIn
        self.nOut = nOut

        self.inp = np.ndarray((nIn + 1, 1))
        #  self.inp[0] = 1
        self.outp = np.ndarray((nOut, 1))
        self.deltas = np.zeros((nOut, 1))

        # You can have better initialization here
        if weights is None:
            rns = np.random.RandomState(int(time.time()))
            self.weights = rns.uniform(size=(nIn + 1, nOut)) - 0.5
        else:
            assert weights.shape == (nIn + 1, nOut)
            self.weights = weights

        self.isClassifierLayer = isClassifierLayer

        # Some handy properties of the layers
        self.size = self.nOut
        self.shape = self.weights.shape
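
Note that this example stores the weights as (nIn + 1, nOut), the transpose of Example #1's (nOut, nIn + 1). The layout determines which side of the dot product the weights go on in the forward pass. A minimal sketch of both conventions, assuming a bias-augmented input column vector; the function names are illustrative:

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def forward_out_by_in(weights, inp):
    # Example #1 layout: (nOut, nIn + 1) dot (nIn + 1, 1) -> (nOut, 1)
    return sigmoid(np.dot(weights, inp))

def forward_in_by_out(weights, inp):
    # Example #2 layout: transpose first, then the same product applies.
    return sigmoid(np.dot(weights.T, inp))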
Code Example #3
File: logistic_layer.py  Project: aldemel/NNPraktikum
    def __init__(self, n_in, n_out, weights=None,
                 activation='sigmoid', is_classifier_layer=False):

        # Get activation function from string
        self.activation_string = activation
        self.activation = Activation.getActivation(self.activation_string)

        self.n_in = n_in
        self.n_out = n_out

        self.inp = np.ndarray((n_in+1, 1))
        self.inp[0] = 1
        self.outp = np.ndarray((n_out, 1))
        self.deltas = np.zeros((n_out, 1))

        # You can have better initialization here
        if weights is None:
            # +1 row for the bias weight, matching the (n_in + 1, 1) input
            self.weights = np.random.rand(n_in + 1, n_out) / 10
        else:
            self.weights = weights

        self.is_classifier_layer = is_classifier_layer

        # Some handy properties of the layers
        self.size = self.n_out
        self.shape = self.weights.shape
Code Example #4
File: logistic_layer.py  Project: garanog/NNPraktikum
    def __init__(self, nIn, nOut, weights=None,
                 activation='softmax', isClassifierLayer=True):

        # Get activation function from string
        # Notice the functional programming paradigms of Python + Numpy
        self.activationString = activation
        self.activation = Activation.getActivation(self.activationString)

        self.nIn = nIn
        self.nOut = nOut

        self.input = np.ndarray((nIn+1, 1))
        self.input[0] = 1
        self.output = np.ndarray((nOut, 1))
        self.delta = np.zeros((nOut, 1))

        # You can have better initialization here
        if weights is None:
            rns = np.random.RandomState(int(time.time()))
            self.weights = rns.uniform(size=(nOut, nIn + 1)) - 0.5
        else:
            self.weights = weights

        self.isClassifierLayer = isClassifierLayer

        # Some handy properties of the layers
        self.size = self.nOut
        self.shape = self.weights.shape
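
Examples #1 and #4 request a 'softmax' activation by name. For reference, a minimal numerically stable softmax, assuming NumPy; this standalone version only illustrates what Activation.getActivation('softmax') would be expected to return:

import numpy as np

def softmax(x):
    # Subtracting the max keeps exp() from overflowing on large inputs
    # without changing the result.
    exps = np.exp(x - np.max(x))
    return exps / np.sum(exps)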
Code Example #5
    def compute_output(self, input):
        if len(input) != len(self.weights):
            raise ValueError("MLPNeuron: Bad input dimensions: "
                             "Got vector of length {}, expected {}".format(
                                 len(input), len(self.weights)))
        weighted_sum = np.dot(input, self.weights) + self.bias
        return Activation.getActivation(self.activation)(weighted_sum)
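
A small usage sketch for compute_output, assuming a hypothetical MLPNeuron constructor; only the weights, bias, and activation attributes that compute_output reads are modeled here:

import numpy as np

class MLPNeuron(object):
    # Hypothetical constructor for illustration; the real class is
    # not shown in this snippet.
    def __init__(self, weights, bias, activation='sigmoid'):
        self.weights = np.asarray(weights)
        self.bias = bias
        self.activation = activation

neuron = MLPNeuron(weights=[0.5, -0.25, 0.1], bias=0.05)
# A length-2 input would raise ValueError; a length-3 input returns
# activation(np.dot(input, weights) + bias).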
Code Example #6
File: layer.py  Project: MikhailAristov/NNPraktikum
    def __init__(self, nIn, nOut, weights=None, activation='sigmoid'):

        # Get activation function from string
        # Notice the functional programming paradigms of Python + Numpy
        self.activationString = activation
        self.activation = Activation.getActivation(self.activationString)

        self.nIn = nIn
        self.nOut = nOut

        # You can have better initialization here
        if weights is None:
            rns = np.random.RandomState(int(time.time()))
            self.weights = rns.uniform(size=(nOut, nIn + 1))
        else:
            self.weights = weights

        # Some handy properties of the layers
        # (self.weights must be assigned before its shape is read)
        self.size = self.nOut
        self.shape = self.weights.shape
Code Example #7
    def __init__(self, train, valid, test,
                 learningRate=0.01, epochs=50,
                 activation='sigmoid',
                 error='mse'):

        self.learningRate = learningRate
        self.epochs = epochs

        self.trainingSet = train
        self.validationSet = valid
        self.testSet = test

        # Initialize the weight vector with small random values
        # between -0.3 and 0.3 to encourage sigmoid function learning
        self.weight = np.random.rand(self.trainingSet.input.shape[1]) * 0.6 - 0.3
        # Alternative: self.weight = np.ones(self.trainingSet.input.shape[1])

        self.activation = Activation.getActivation(activation)
        self.activationPrime = Activation.getDerivative(activation)
        self.activationString = activation[0].upper() + activation[1:]

        self.erString = error

        if error == 'absolute':
            self.erf = erf.AbsoluteError()
        elif error == 'different':
            self.erf = erf.DifferentError()
        elif error == 'mse':
            self.erf = erf.MeanSquaredError()
        elif error == 'sse':
            self.erf = erf.SumSquaredError()
        elif error == 'bce':
            self.erf = erf.BinaryCrossEntropyError()
        elif error == 'crossentropy':
            self.erf = erf.CrossEntropyError()
        else:
            raise ValueError('Cannot instantiate the requested '
                             'error function: ' + error + ' not available')
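
The if/elif chain that selects the error function can also be written as a dictionary dispatch inside __init__. A sketch assuming the same erf module and class names used above:

_ERROR_FUNCTIONS = {
    'absolute': erf.AbsoluteError,
    'different': erf.DifferentError,
    'mse': erf.MeanSquaredError,
    'sse': erf.SumSquaredError,
    'bce': erf.BinaryCrossEntropyError,
    'crossentropy': erf.CrossEntropyError,
}

try:
    # Look the class up by its string key, then instantiate it.
    self.erf = _ERROR_FUNCTIONS[error]()
except KeyError:
    raise ValueError('Cannot instantiate the requested '
                     'error function: ' + error + ' not available')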