Example #1
    def BuildSampleXorNetwork(self):

        """
        Builds a simple two-input, one-output network with a two-node hidden
        layer in order to learn the XOR function.  This network is the
        "simplest" net that can be built to do something "useful".  It's
        intended to be used for testing and debugging the training end to end.
        """
        temp = [
            {
                'transfer_function': 'linear'
                , 'number_of_nodes': 2
                , 'layerName': 'input'
            }
            , {
                'transfer_function': 'sigmoidal'
                , 'number_of_nodes': 2
                , 'layerName': 'hidden layer'
            }
            , {
                'transfer_function': 'linear'
                , 'number_of_nodes': 1
                , 'layerName': 'output'
            }]
        """ NodeFactory is a helper function for making nodes."""
        nf = NodeFactory()
        for x in temp:
            """
            build layers
            """
            # this is a bit funky, NodeFactory is called for each
            # iteration though the loop and it returns a node.  So in the
            # end we have a list of layers lists, with each layer list
            # being composed of some Node type.
            self.layerList.append(Layer(
                x['layerName'], 1, [nf.makeNode(x['transfer_function']) for y
                                    in range(0, x['number_of_nodes'])], True))

        # This network is for testing.  To make the unit testing easier we
        # just set the seed to get the same sequence of random numbers each
        # time.
        np.random.seed(42)

        for idx, x in enumerate(self.layerList[1:]):
            """
            create random weights matrices based on layer definitions
            """
            rows = len(self.layerList[idx].NodeList)
            cols = len(x.NodeList)
            temp = Weight(rows, cols)
            self.weightList.append(temp)
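
A minimal usage sketch of the method above, assuming a hypothetical NeuralNetwork container class that initialises self.layerList and self.weightList as empty lists before BuildSampleXorNetwork is called; only the method itself and the 2-2-1 topology come from the example.

# Sketch only: NeuralNetwork is an assumed wrapper class, not part of the
# original code.  The assertions follow from the snippet above together with
# Layer.__init__ in Example #2 (a bias node is appended to each layer).
net = NeuralNetwork()
net.BuildSampleXorNetwork()

assert len(net.layerList) == 3            # input, hidden, output
assert len(net.weightList) == 2           # input->hidden, hidden->output
# each layer gained a bias node, so the node counts are 3, 3 and 2
assert [len(l.NodeList) for l in net.layerList] == [3, 3, 2]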
Example #2
    def __init__(self, layer_name, layer_number, node_list, has_bias):
        self.layerName = layer_name
        self.layerNumber = layer_number
        self.NodeList = node_list
        self.hasBias = has_bias
        if has_bias:
            # append a bias node to the end of the layer's node list
            nf = NodeFactory()
            self.NodeList.append(nf.makeNode('bias'))
        # output has one column per node, including the bias node
        self.output = matrix(zeros((1, len(self.NodeList))))
        if has_bias:
            # gradient and output_derivative exclude the bias column
            self.gradient = matrix(zeros((1, len(self.NodeList) - 1)))
            self.output_derivative = matrix(zeros((1, len(self.NodeList) - 1)))
        else:
            self.gradient = matrix(zeros((1, len(self.NodeList))))
            self.output_derivative = matrix(zeros((1, len(self.NodeList))))
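
Because the bias node is appended inside __init__, output is one column wider than gradient and output_derivative. A small sketch of constructing a Layer directly, assuming NodeFactory and Layer are importable from the project's own modules and that the module defining Layer already imports matrix and zeros from numpy, as __init__ requires; 'sigmoidal' matches the transfer-function name used in Example #1.

# Sketch only: import locations are assumptions, class names come from the
# examples above.
nf = NodeFactory()
hidden = Layer('hidden layer', 1,
               [nf.makeNode('sigmoidal') for _ in range(2)],
               True)

print(hidden.output.shape)             # (1, 3) -- includes the bias column
print(hidden.gradient.shape)           # (1, 2) -- bias column excluded
print(hidden.output_derivative.shape)  # (1, 2)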
Example #3
    def BuildNetworkFromJSON(self, inputFile):
        """
        Reads in a JSON formatted file and builds the network defined in the JSON
        A Neural Network is nothing more than lists of Nodes, and arrays of Weights                self.weight_update()

        """

        with open(inputFile) as nd:
            nn = nd.read()
            struct = json.loads(nn)

        for idx, x in enumerate(struct['layers']):
            """
            build layers
            """
            # this is a bit funky, NodeFactory is called for each
            # iteration though the loop and it returns a node.  So in the
            # end we have a list of layers, with each layer in the list                self.weight_update()

            # being composed of a list of Nodes.
            # @todo allow for heterogeneous lists of nodes, i.e. not all needs need
            # to have the same transfer function
            nf = NodeFactory()
            node_list = [nf.makeNode(x['transfer_function']) for y
                         in range(0, x['number_of_nodes'])]

            self.layerList.append(Layer(
                x['layerName'], idx, node_list, x['has_bias'] == 1))


        x_minus_one = len(self.layerList[0].NodeList)
        for x in self.layerList[1:]:
            """
            create random weights matrices based on layer definitions.  The minus one on the second
            argument assumes that each layer has an extra bias node.
            """
            self.weightList.append(Weight(x_minus_one, len(x.NodeList) - (1 if x.hasBias else 0)))
            x_minus_one = len(x.NodeList)
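
A plausible input file for BuildNetworkFromJSON can be inferred from the keys the method reads ('layers', 'layerName', 'transfer_function', 'number_of_nodes', 'has_bias'); the exact schema shipped with the original project may differ, and the NeuralNetwork wrapper class used below is an assumption.

# Sketch only: the key names come from the method above, everything else is
# illustrative.
import json

xor_definition = {
    "layers": [
        {"layerName": "input",        "transfer_function": "linear",
         "number_of_nodes": 2, "has_bias": 1},
        {"layerName": "hidden layer", "transfer_function": "sigmoidal",
         "number_of_nodes": 2, "has_bias": 1},
        {"layerName": "output",       "transfer_function": "linear",
         "number_of_nodes": 1, "has_bias": 1},
    ]
}

with open("xor_network.json", "w") as fh:
    json.dump(xor_definition, fh, indent=4)

net = NeuralNetwork()                    # hypothetical container class
net.BuildNetworkFromJSON("xor_network.json")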