Example #1
    def train(self, train=True):
        """Trains the tagger and parser networks on training data.

        Args:
            train: If True, train the networks; if False, restore a
                previously saved session instead.
        """

        # Find pad value
        tag_pad = self.tag_dict['<PAD/>']
        word_pad = self.word_dict['<PAD/>']

        self.classifier = NeuralNetwork(self.config,
                                        self.x_embeddings,
                                        self.tags_embeddings,
                                        len(self.tag_dict_inv),
                                        feature_type='tagger_1')

        if not train:
            self.classifier.restore_sess()
        else:
            logging.info('Training NN for tagger!')

            # Generate all configurations for training
            x_tagger, y_tagger = generate_data_for_tagger(
                self.word_ids, self.tags, self.sent_lens, tag_pad, word_pad)
            x_dev_tagger, y_dev_tagger = generate_data_for_tagger(
                self.word_ids_dev, self.tags_dev, self.sent_lens_dev, tag_pad,
                word_pad)

            self.classifier.train(x_tagger, y_tagger, x_dev_tagger,
                                  y_dev_tagger)

            logging.info('Training NN for parser!')
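The snippet is cut off after the parser log line, but the train flag already shows the intended pattern: train from scratch or restore a saved session. A minimal usage sketch; the tagger instance is a placeholder, not code from the project:

# Hypothetical usage; `tagger` is an instance of the class defining train().
tagger.train(train=True)   # train the networks from scratch
tagger.train(train=False)  # build the classifier, then restore a saved session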
Example #2
 def make_population(self,
                     population_size,
                     input_size,
                     layer_sizes,
                     lr=0.5):
     self.individuals = []
     for i in range(population_size):
         network = NeuralNetwork()
         network.make(input_size, layer_sizes, lr)
         self.individuals.append(Individual(network))
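A minimal sketch of driving make_population; the enclosing class name (Population) and the network dimensions are assumptions, since only the method body is shown:

# Hypothetical driver; Population is an assumed name for the enclosing class.
population = Population()
population.make_population(population_size=10,
                           input_size=4,
                           layer_sizes=[8, 6, 1],
                           lr=0.5)
print(len(population.individuals))  # -> 10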
Example #3
 def crossover(self, individual2, fitness):
     fitness1 = fitness(self)
     fitness2 = fitness(individual2)
     vector1 = self.neural_network.get_neuron_vector()
     vector2 = individual2.neural_network.get_neuron_vector()
     layer_sizes = self.neural_network.get_layer_sizes()
     # split the neuron vectors in proportion to fitness
     central_point = int(
         round(fitness1 * len(vector1) / max(fitness1 + fitness2, 1)))
     # print("crsover f1 = %.3f, f2 = %.3f, cp = %d, len=%d" % (fitness1, fitness2, central_point, len(vector1)))
     #crossover
     neurons_final = vector1[:central_point] + vector2[central_point:]
     neural_network = NeuralNetwork()
     neural_network.make_from_neuron_vector(neurons_final, layer_sizes)
     return Individual(neural_network)
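The crossover point is proportional to parent 1's share of the combined fitness, so a fitter parent contributes a larger prefix of the child's neuron vector. A hedged usage sketch; the fitness function and parent objects are placeholders:

# Hypothetical fitness function; with equal scores the crossover point
# falls at the midpoint of the neuron vector.
def fitness_fn(individual):
    return 1.0

child = parent1.crossover(parent2, fitness_fn)  # parents are Individual objects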
Example #4
    def train(self, train=False):
        """Trains the parser on training data.

        Args:
            train: If True, train the network; if False (default), restore
                a previously saved session instead.
        """
        
        # Find pad value
        word_pad_id = self.word_dict['<PAD/>']
        tag_pad_id = self.tag_dict['<PAD/>']


        # Predict tags for the training data using the tagger
        for sent_id in range(self.tags.shape[0]):
            sent_len = int(self.sent_lens[sent_id])
            words = self.word_ids[sent_id, :sent_len].tolist()
            words = [self.word_dict_inv[str(int(w))] for w in words]

            tag_pred = self.tagger.tag(words)
            tag_pred = [self.tag_dict[t] for t in tag_pred]
            self.tags[sent_id, :sent_len] = tag_pred


        # Generate training configurations for the parser
        x_parser, y_parser = self.generate_data_for_parser(
            self.word_ids, self.tags, self.gold_tree, self.sent_lens,
            tag_pad_id, word_pad_id)

        self.classifier = NeuralNetwork(self.config, self.x_embeddings,
                                        self.tags_embeddings, 3,
                                        feature_type='parser_1')

        if not train:
            self.classifier.restore_sess()   
        else:
            num_data = x_parser.shape[0]
            num_train = int(num_data * 0.99)

            x_train_parser = x_parser[:num_train, :]
            y_train_parser = y_parser[:num_train]

            x_dev_parser = x_parser[num_train:, :]
            y_dev_parser = y_parser[num_train:]

            self.classifier.train(x_train_parser, y_train_parser,
                                  x_dev_parser, y_dev_parser)
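The 99/1 split slices the arrays in their original order, so the dev set is just the tail of the data. If the sentences are not already shuffled, permuting first gives an unbiased dev slice; a minimal sketch, assuming x_parser and y_parser are parallel NumPy arrays:

import numpy as np

# Shuffle before splitting so the 1% dev slice is drawn from the whole
# dataset rather than its tail.
perm = np.random.permutation(x_parser.shape[0])
x_parser = x_parser[perm]
y_parser = y_parser[perm]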
Example #5
def createNetworkLayout(logger, preprocessor):
    '''
    Returns the network with the specified layout.
    '''

    # Create Neural Network
    network = NeuralNetwork()
    network.createSequentialModel()

    input_shape = (preprocessor.getNetworkData()['input'].shape[1],
                   preprocessor.getNetworkData()['input'].shape[2])
    vokab_length = len(preprocessor.getLabelEncoder().classes_)

    # Add Layers

    # units = how many nodes a layer should have
    # input_shape = shape of the data it will be training
    network.add(LSTM(units=256, input_shape=input_shape,
                     return_sequences=True))

    # rate = fraction of input units that should be dropped during training
    network.add(Dropout(rate=0.3))

    network.add(LSTM(units=512, return_sequences=True))
    network.add(Dropout(rate=0.3))

    network.add(LSTM(units=256))
    network.add(Dense(units=256))
    network.add(Dropout(rate=0.3))

    # the last Dense layer needs as many units as there are distinct output
    # classes -> assures that the output of the network will map to our classes
    network.add(Dense(units=vokab_length))
    network.add(Activation('softmax'))

    logger.info("Compiling model...")
    network.compile(_loss='categorical_crossentropy',
                    _optimizer='rmsprop',
                    _metrics=['acc'])

    logger.info("Finished compiling.")
    logger.info("Model Layers: \n[]".format(network._model.summary()))

    return network
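A possible way to drive this builder; the 'output' key and fitting the underlying Keras model directly are assumptions about the wrapper and preprocessor, not APIs confirmed by the snippet:

# Hypothetical usage; 'output' is an assumed key in getNetworkData().
network = createNetworkLayout(logger, preprocessor)
data = preprocessor.getNetworkData()
network._model.fit(data['input'], data['output'], epochs=50, batch_size=64)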
Example #6
import argparse
import random
from os import walk
from os.path import join

# The snippet begins mid-call; the imports and argparse prelude below are
# reconstructed from the usages further down.
parser = argparse.ArgumentParser()
parser.add_argument('image_dir', help='Directory containing the training images')
parser.add_argument('-b',
                    '--batchsize',
                    help='Number of images per batch',
                    default=1)
parser.add_argument('-e',
                    '--epochs',
                    help='Number of epochs to train',
                    default=1)
args = parser.parse_args()

image_dir = args.image_dir
batch_size = int(args.batchsize)
epochs = int(args.epochs)


def get_image_paths(directory):
    # Collect (image, label) path pairs for every label file under `directory`.
    # `is_label` and `remove_label` are project helpers assumed to be in scope.
    paths = []
    for dirpath, dirnames, filenames in walk(directory):
        for filename in filenames:
            if is_label(filename):
                paths.append(
                    (join(dirpath, remove_label(filename)),
                     join(dirpath, filename)))

    random.shuffle(paths)

    return paths


with NeuralNetwork() as nn:
    nn.train(get_image_paths(image_dir), batch_size, epochs)
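Using NeuralNetwork in a with statement requires the context-manager protocol. A minimal sketch of what the wrapper could implement; the close() cleanup is an assumption, not confirmed code:

class NeuralNetwork:
    # Sketch of the protocol implied by `with NeuralNetwork() as nn:`.
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()   # hypothetical cleanup, e.g. releasing the session
        return False   # propagate any exception from the with block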
Example #7
def createNetworkLayout(logger,
                        preprocessor,
                        layout,
                        loss,
                        optimizer,
                        activation,
                        metrics,
                        weightsPath=None,
                        dropout=0.3,
                        callbacks=None):
    '''
    Creates the network layout.
    Validates weightsPath so you don't have to take care of that.
    Returns the network with the specified layout.
    '''

    # avoid the mutable-default pitfall for the callbacks list
    if callbacks is None:
        callbacks = []

    # check for correctness and create folder if missing
    if weightsPath is not None:
        weightsPath = validateFolderPath(weightsPath, logger)

    # Create Neural Network
    network = NeuralNetwork()
    network.createSequentialModel()

    input_shape = (preprocessor.getNetworkData()['input'].shape[1],
                   preprocessor.getNetworkData()['input'].shape[2])
    vokab_length = len(preprocessor.getLabelEncoder().classes_)

    # Add Layers

    # units = how many nodes a layer should have
    # input_shape = shape of the data it will be training

    if layout == 'default':
        network = defaultLayout(network, input_shape, dropout)
    elif layout == 'multi':
        network = multiLSTMLayout(network, input_shape, dropout)
    elif layout == 'bidirectional':
        network = bidirectionalLayout(network, input_shape, dropout)
    elif layout == 'multibidirectional':
        network = multibidirectionalLayout(network, input_shape, dropout)
    #elif layout == 'attention':
    #    network = attentionLayout(network, input_shape)

    # the final layers are shared by every layout: the last Dense layer needs
    # as many units as there are distinct output classes
    # -> assures that the output of the network will map to our classes
    network.add(Dense(units=vokab_length))
    network.add(Activation(activation))

    # compile network
    logger.info("Compiling model...")
    network.compile(_loss=loss,
                    _path=weightsPath,
                    _optimizer=optimizer,
                    _metrics=metrics,
                    _callbacks=callbacks)

    logger.info("Finished compiling.")
    #logger.info("Model Layers: \n[]".format(network._model.summary()))

    return network
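The layout helpers (defaultLayout, multiLSTMLayout, bidirectionalLayout, multibidirectionalLayout) are defined elsewhere. A plausible defaultLayout, reconstructed from the fixed layer stack in Example #5 with the dropout rate made configurable; a sketch, not the project's actual helper:

def defaultLayout(network, input_shape, dropout):
    # Same stack as Example #5: two stacked LSTMs feeding a smaller
    # LSTM/Dense head, with dropout after each block.
    network.add(LSTM(units=256, input_shape=input_shape,
                     return_sequences=True))
    network.add(Dropout(rate=dropout))
    network.add(LSTM(units=512, return_sequences=True))
    network.add(Dropout(rate=dropout))
    network.add(LSTM(units=256))
    network.add(Dense(units=256))
    network.add(Dropout(rate=dropout))
    return network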