# Example #1
    def __init_layers(self, layer_spec):
        """Build one layer per spec entry and chain consecutive layers.

        The first entry becomes an InputLayer, the last an OutputLayer, and
        every entry in between a HiddenLayer; all layers share
        self.activation_fn. Consecutive layers are then joined pairwise.
        """
        end = len(layer_spec) - 1
        self.layers = []
        for position, size in enumerate(layer_spec):
            if position == 0:
                layer_cls = InputLayer
            elif position == end:
                layer_cls = OutputLayer
            else:
                layer_cls = HiddenLayer
            self.layers.append(layer_cls(size, self.activation_fn))

        # Wire each layer to its immediate successor.
        for left, right in zip(self.layers, self.layers[1:]):
            self.__join_layers(left, right)
# Example #2
def main():
    """Build the network described by alexnet.cfg and run one forward pass.

    Loads ../data/heart.jpg, reshapes it into a single NHWC batch, assembles
    the network via NetworkBuilder, and evaluates the final layer on it.

    Returns the value of the network's output tensor for the test image.
    """
    image = read_image('../data/heart.jpg')
    image = np.reshape(image, [1, 224, 224, 3])  # single-image NHWC batch
    # BUG FIX: ndarray.astype() returns a NEW array; the original call
    # discarded the result, so the cast must be assigned back.
    image = image.astype(np.float32)

    parser = Parser('../data/alexnet.cfg')
    network_builder = NetworkBuilder("test")  # type: NetworkBuilder
    network_builder.set_parser(parser)
    network = network_builder.build()  # type: Network
    network.add_input_layer(InputLayer(tf.float32, [None, 224, 224, 3]))
    network.add_output_layer(OutputLayer())
    network.connect_each_layer()

    # Context manager guarantees the session is released even on error
    # (the original leaked the Session).
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        fc_layer = sess.run(network.output, feed_dict={network.input: image})
    return fc_layer
        def setUpClass( self ):
            """Create a minimal 2-input / 1-output OpenCL network for tests."""
            from opencl import OpenCL
            from layer import InputLayer, OutputLayer, ExecutionContext

            self.ocl = OpenCL(pyopencl.create_some_context())

            # A two-neuron input layer feeding a single output neuron.
            self.i = InputLayer(2, self.ocl)
            self.o = OutputLayer(1, self.ocl)
            self.i.link_next(self.o)

            self.nnc = ExecutionContext(self.i, self.o, allow_training=True)

            # Deterministic constant starting weights keep tests reproducible.
            self.i.set_weights(numpy.full(self.i.weights_count, 0.1, numpy.float32))
            self.o.set_weights(numpy.full(self.o.weights_count, 0.3, numpy.float32))

            self.tr = TrainingResults()
            self._create_method()
# Example #4
def trainModel(X, Y, devX, devY):
    """Train a small dense network on (X, Y) and plot results on the dev set.

    Architecture: Input(n_x) -> Dense(5, relu) -> Dense(n_y, sigmoid),
    trained for 10000 iterations with binary cross-entropy ("bce") and plain
    gradient descent ("gd").

    Inputs are expected as (features x samples) matrices — the layer sizes
    are taken from the first axis of X and Y.

    Returns the trained Model instance.
    """
    m = Model()

    # Input and output dimensionality come from the data matrices.
    n_x, _ = X.shape
    n_y, _ = Y.shape

    # Input -> Dense(5, relu) -> Dense(n_y, sigmoid)
    m.addLayer(InputLayer(n_x))
    m.addLayer(DenseLayer(5, act="relu"))
    m.addLayer(DenseLayer(n_y, act="sigmoid"))

    # Prepare all layer internals.
    params = {
        # Optimizer parameters
        "learning_rate": 0.01,
        "beta": 0.9,        # Momentum, RMSProp
        "beta1": 0.9,       # Adam
        "beta2": 0.999,

        # Mini-batch size; 0 = full-batch gradient descent (no minibatches)
        "batch_size": 0
    }
    m.initialize(params, lossName="bce", optName="gd")

    # Train, then visualize performance on the dev set.
    data = m.train(X, Y, 10000, devX, devY)
    PlotModel(m, data, devX, devY)
    return m
    def add(self, layer):
        """
        Add a layer on top of the neural network's layer stack.

        The first layer added must either already have an inbound connection
        or carry an 'input_shape'; in the latter case an InputLayer is
        synthesized from that shape and added first. Every subsequent layer
        is called on the model's current single output tensor.

        Inputs
        ------
            @param layer : A layer instance

        Raises
        ------
            TypeError  : if layer is not a Layer instance.
            ValueError : if the first layer lacks an 'input_shape', is already
                         connected elsewhere, or produces multiple output
                         tensors.
        """
        if (not isinstance(layer, Layer)):
            raise TypeError("The added layer must be an instance "
                            "of class Layer. Found {}".format(type(layer)))
        if (not self.outputs):
            # Model has no output yet: this is the first layer being added.
            if (len(layer.inbound_connections) == 0):
                #create an input layer
                if (not hasattr(layer, 'input_shape')
                        or layer.input_shape is None):
                    raise ValueError("The first layer in a NeuralNetwork "
                                     "model must have an 'input_shape'")
                # Re-enter add() with a synthesized InputLayer, then with the
                # layer itself; both recursive calls take the paths below.
                input_shape = layer.input_shape
                self.add(InputLayer(input_shape=input_shape))
                self.add(layer)
                return

            if (len(layer.inbound_connections) != 1):
                raise ValueError("The layer added to NeuralNetwork model "
                                "must not be connected elsewhere."
                                "Receiver layer {}".format(layer.name) + \
                                " which has " + \
                                str(len(layer.inbound_connections)) +\
                                " inbound connections")
            if (len(layer.inbound_connections[0].output_tensors) != 1):
                raise ValueError(
                    "The layer added to NeuralNetwork "
                    "must have a  single output tensor."
                    " Use a different API for multi-output layers")

            # Adopt the layer's single output tensor as the model output and
            # trace back to the source input tensors.
            self.outputs = [layer.inbound_connections[0].output_tensors[0]]
            self.inputs = get_source_inputs(self.outputs[0])

            # Register the model itself as a connection over these tensors.
            MLConnection(outbound_model=self,
                         inbound_models=[],
                         connection_indices=[],
                         tensor_indices=[],
                         input_tensors=self.inputs,
                         output_tensors=self.outputs,
                         input_shapes=[x._nuro_shape for x in self.inputs],
                         output_shapes=[self.outputs[0]._nuro_shape])
        else:
            # Model already has an output: apply the layer to it.
            output_tensor = layer(self.outputs[0])
            if (isinstance(output_tensor, list)):
                raise ValueError(
                    "The layer added to NeuralNetwork "
                    "must have a  single output tensor."
                    " Use a different API for multi-output layers")
            self.outputs = [output_tensor]
            self.inbound_connections[0].output_tensors = self.outputs
            # NOTE(review): 'outputs_shapes' differs from the 'output_shapes'
            # kwarg used in MLConnection above — possible typo; confirm the
            # attribute name on the connection class.
            self.inbound_connections[0].outputs_shapes = [
                self.outputs[0]._nuro_shape
            ]

        self.layers.append(layer)
        # Mark the model as needing a (re)build after the stack changed.
        self.is_built = False
# Example #6
# y = []
# for i in range(-1000, 1000, 2):
#     a = np.random.rand()/10
#     X.append(np.array([i*a]).reshape(1, ))
#     y.append(np.array([f(i*a)]).reshape(1, ))

# X = np.array(X)
# y = np.array(y)

# N = NeuralNetwork(eta=.001, optimizer="adam")
# N.add_layer(InputLayer(784))
# N.add_layer(Layer(128, activation='leaky'))
# N.add_layer(Layer(32, activation='leaky'))
# N.add_layer(Layer(16, activation='leaky'))
# N.add_layer(Layer(10, activation='sig'))

# N.fit(X_train, y_train, epochs=10)

# # for i in range(-100, 100, 3):
# #     print(str(N.predict(np.array([i]).reshape(1, ))) + " " + str(f(i)))

# print(N.evaluate(X_test, y_test))

# Smoke test: a 3-input -> 2 -> 1 network, one forward pass on an all-ones vector.
network = NeuralNetwork()
network.add_layer(InputLayer(3))
for width in (2, 1):
    network.add_layer(Layer(width))

result = network.feedforward(np.array([1, 1, 1]))
print(result)