Example #1
def createNetworkLayout(logger, preprocessor):
    '''
    Returns the network with the specified layout.
    '''

    # Create Neural Network
    network = NeuralNetwork()
    network.createSequentialModel()

    input_shape = (preprocessor.getNetworkData()['input'].shape[1],
                   preprocessor.getNetworkData()['input'].shape[2])
    vokab_length = len(preprocessor.getLabelEncoder().classes_)

    # Add Layers

    # units = how many nodes a layer should have
    # input_shape = shape of the data the network will be trained on
    network.add(LSTM(units=256, input_shape=input_shape,
                     return_sequences=True))

    # rate = fraction of input units that should be dropped during training
    network.add(Dropout(rate=0.3))

    network.add(LSTM(units=512, return_sequences=True))
    network.add(Dropout(rate=0.3))

    network.add(LSTM(units=256))
    network.add(Dense(units=256))
    network.add(Dropout(rate=0.3))

    # the last Dense layer needs as many units as there are distinct output classes
    # -> ensures that the output of the network maps onto our classes
    network.add(Dense(units=vokab_length))
    network.add(Activation('softmax'))

    logger.info("Compiling model...")
    network.compile(_loss='categorical_crossentropy',
                    _optimizer='rmsprop',
                    _metrics=['acc'])

    logger.info("Finished compiling.")
    logger.info("Model Layers: \n[]".format(network._model.summary()))

    return network
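
For reference, here is a minimal standalone sketch of the same layout built directly with the Keras Sequential API, without the NeuralNetwork wrapper. The Keras imports and the helper name buildDefaultLayout are assumptions on my part; input_shape and vokab_length would come from the preprocessor exactly as above:

from keras.models import Sequential
from keras.layers import LSTM, Dropout, Dense, Activation

def buildDefaultLayout(input_shape, vokab_length):
    # same stack as Example #1: LSTM(256) -> Dropout -> LSTM(512) -> Dropout
    # -> LSTM(256) -> Dense(256) -> Dropout -> Dense(vokab_length) -> softmax
    model = Sequential()
    model.add(LSTM(units=256, input_shape=input_shape, return_sequences=True))
    model.add(Dropout(rate=0.3))
    model.add(LSTM(units=512, return_sequences=True))
    model.add(Dropout(rate=0.3))
    model.add(LSTM(units=256))
    model.add(Dense(units=256))
    model.add(Dropout(rate=0.3))
    model.add(Dense(units=vokab_length))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['acc'])
    return model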
Example #2
def createNetworkLayout(logger,
                        preprocessor,
                        layout,
                        loss,
                        optimizer,
                        activation,
                        metrics,
                        weightsPath=None,
                        dropout=0.3,
                        callbacks=None):
    '''
    Creates the network layout.
    Will validate the weightsPath so you don't have to take care of that.
    Returns the network with the specified layout.
    '''

    # avoid sharing a mutable default argument between calls
    if callbacks is None:
        callbacks = []

    # check for correctness and create the folder if it is missing
    if weightsPath is not None:
        weightsPath = validateFolderPath(weightsPath, logger)

    # Create Neural Network
    network = NeuralNetwork()
    network.createSequentialModel()

    input_shape = (preprocessor.getNetworkData()['input'].shape[1],
                   preprocessor.getNetworkData()['input'].shape[2])
    vokab_length = len(preprocessor.getLabelEncoder().classes_)

    # Add Layers

    # units = how many nodes a layer should have
    # input_shape = shape of the data the network will be trained on

    if layout == 'default':
        network = defaultLayout(network, input_shape, dropout)
    elif layout == 'multi':
        network = multiLSTMLayout(network, input_shape, dropout)
    elif layout == 'bidirectional':
        network = bidirectionalLayout(network, input_shape, dropout)
    elif layout == 'multibidirectional':
        network = multibidirectionalLayout(network, input_shape, dropout)
    #elif layout == 'attention':
    #    network = attentionLayout(network, input_shape)

    # the last Dense layer needs as many units as there are distinct output classes
    # these final layers are the same for every layout
    # -> ensures that the output of the network maps onto our classes
    network.add(Dense(units=vokab_length))
    network.add(Activation(activation))

    # compile network
    logger.info("Compiling model...")
    network.compile(_loss=loss,
                    _path=weightsPath,
                    _optimizer=optimizer,
                    _metrics=metrics,
                    _callbacks=callbacks)

    logger.info("Finished compiling.")
    #logger.info("Model Layers: \n[]".format(network._model.summary()))

    return network
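
A hypothetical call of the function above, assuming a logger and a preprocessor have already been set up. The layout string is one of the values handled by the if/elif chain; the weightsPath value and the remaining arguments are illustrative placeholders, not values taken from the original project:

network = createNetworkLayout(logger,
                              preprocessor,
                              layout='bidirectional',
                              loss='categorical_crossentropy',
                              optimizer='rmsprop',
                              activation='softmax',
                              metrics=['acc'],
                              weightsPath='weights/',  # folder is validated/created if missing
                              dropout=0.3)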