Example 1
    def instantiate_model(self):
        self.model = create_model_resnet(
            self.input_shape,
            n_output=self.n_output,
            normalize=self.normalize,
            kernel_shape=self.kernel_shape,
            size_blocks=self.size_blocks,
            resnet=self.resnet,
            dropout=self.dropout,
            n_channels_by_block=self.n_channels_by_block,
            size_dense=self.size_dense,
            average_pooling=self.average_pooling,
            separable_conv=self.separable_conv)
        self.model.summary()

        self.optimizer = optimizers.Adamax(lr=self.lr) if self.optimizer == 'adamax' \
                    else optimizers.RMSprop(lr=self.lr) if self.optimizer == 'rmsprop' \
                    else optimizers.SGD(lr=self.lr, momentum=.9) if self.optimizer == 'sgd' \
                    else optimizers.Adam(lr=self.lr) if self.optimizer == 'adam' else None

        if self.zoom:
            self.datagen = ImageDataGenerator(rotation_range=10,
                                              width_shift_range=0.1,
                                              height_shift_range=0.1,
                                              zoom_range=0.1,
                                              horizontal_flip=True,
                                              fill_mode='nearest')
        elif self.shift:
            self.datagen = ImageDataGenerator(width_shift_range=0.1,
                                              height_shift_range=0.1,
                                              fill_mode='nearest')
        elif self.flip:
            self.datagen = ImageDataGenerator(horizontal_flip=bool(self.flip))
        else:
            self.datagen = None
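The method above only builds self.datagen; how it is consumed is outside the snippet. A minimal sketch of the training step such a class might run, assuming hypothetical x_train/y_train arrays, a batch_size attribute, and an epochs count:

        if self.datagen is not None:
            # Train on augmented batches produced by the generator.
            self.model.fit_generator(
                self.datagen.flow(x_train, y_train, batch_size=self.batch_size),
                steps_per_epoch=len(x_train) // self.batch_size,
                epochs=epochs)
        else:
            # No augmentation configured: train on the raw arrays.
            self.model.fit(x_train, y_train,
                           batch_size=self.batch_size,
                           epochs=epochs)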
Example 2
    def instantiate_model(self):
        self.model = create_model_resnet(
            self.input_shape,
            n_output=self.n_output,
            normalize=self.normalize,
            kernel_shape=self.kernel_shape,
            size_blocks=self.size_blocks,
            resnet=self.resnet,
            dropout=self.dropout,
            n_channels_by_block=self.n_channels_by_block,
            size_dense=self.size_dense,
            average_pooling=self.average_pooling,
            separable_conv=self.separable_conv)
        self.model.summary()

        self.optimizer = optimizers.Adamax(lr=self.lr) if self.optimizer == 'adamax' \
                    else optimizers.RMSprop(lr=self.lr) if self.optimizer == 'rmsprop' \
                    else optimizers.SGD(lr=self.lr, momentum=.9) if self.optimizer == 'sgd' \
                    else optimizers.Adam(lr=self.lr) if self.optimizer == 'adam' else None

        # TODO
        self.datagen = None
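Both examples pick the optimizer with a chained conditional expression. That works, but a dictionary of factories is easier to read and extend; a sketch of equivalent logic (make_optimizer is a hypothetical name):

def make_optimizer(name, lr):
    # Map names to optimizer factories; returns None for unknown names,
    # matching the behavior of the chained conditional above.
    factories = {
        'adamax': lambda: optimizers.Adamax(lr=lr),
        'rmsprop': lambda: optimizers.RMSprop(lr=lr),
        'sgd': lambda: optimizers.SGD(lr=lr, momentum=.9),
        'adam': lambda: optimizers.Adam(lr=lr),
    }
    factory = factories.get(name)
    return factory() if factory is not None else None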
Example 3
    def testAdamaxCompatibility(self):
        opt_v1 = optimizers.Adamax(lr=0.01)
        opt_v2 = adamax.Adamax(learning_rate=0.01)
        self._testOptimizersCompatibility(opt_v1, opt_v2)
Example 4

def modelo():
    # Build a classifier on top of a pre-trained VGG16 network
    # (assumes `clases`, the number of target classes, is defined elsewhere).
    vgg = applications.vgg16.VGG16()
    cnn = Sequential()
    for capa in vgg.layers:
        cnn.add(capa)
    # Use Sequential.pop() instead of cnn.layers.pop(): mutating the layer
    # list directly leaves the model's output tensors stale.
    cnn.pop()
    # Freeze the copied VGG16 layers so only the new head is trained.
    for layer in cnn.layers:
        layer.trainable = False
    cnn.add(Dense(clases, activation='softmax'))

    return cnn


cnn = modelo()

cnn.compile(loss='categorical_crossentropy',
            optimizer=optimizers.Adamax(lr=lr),
            metrics=['accuracy'])

# training
cnn.fit_generator(entrenamiento_generador,
                  steps_per_epoch=pasos_entrenamiento,
                  epochs=epocas,
                  validation_data=validacion_generador,
                  validation_steps=pasos_validacion,
                  callbacks=[tensorboard, detener])

target_dir = './modelo_cnn/'
if not os.path.exists(target_dir):
    os.mkdir(target_dir)
cnn.save('./modelo_cnn/modelo2.h5')
cnn.save_weights('./modelo_cnn/pesos2.h5')
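The generators passed to fit_generator() above (entrenamiento_generador, validacion_generador) are defined outside the snippet. A minimal sketch of how they might be built with Keras' ImageDataGenerator, assuming hypothetical ./data/train and ./data/val directories and VGG16's 224x224 input size:

from keras.preprocessing.image import ImageDataGenerator

datagen = ImageDataGenerator(rescale=1. / 255)
entrenamiento_generador = datagen.flow_from_directory(
    './data/train',            # hypothetical training directory
    target_size=(224, 224),    # VGG16 expects 224x224 RGB inputs
    batch_size=32,
    class_mode='categorical')
validacion_generador = datagen.flow_from_directory(
    './data/val',              # hypothetical validation directory
    target_size=(224, 224),
    batch_size=32,
    class_mode='categorical')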
Example 5
def optimizer(name='adam', l_rate=0.01, decay=0.0, **kwargs):
    '''
    Create an optimizer with default parameters, except for the learning
    rate. Note that most optimizers discourage modifying their specially
    designed parameters. We suggest specifying gamma according to common
    practice when using the Adabound optimizers.
    Options:
        name: the name of the optimizer (default='adam') (available: 'adam',
              'amsgrad', 'adamax', 'adabound', 'amsbound', 'nadam',
              'namsgrad', 'nadabound', 'namsbound', 'adadelta', 'rms',
              'adagrad', 'adamw', 'nmoment', 'moment', 'sgd', 'proximal')
        l_rate: learning rate (default=0.01)
        decay: decay ratio ('adadeltaDA' does not support this option)
        other parameters: see the usage of the specific optimizer.
    Return:
        the particular optimizer object.
    '''
    name = name.casefold()
    if name == 'adam':
        return optimizers.Adam(l_rate, decay=decay, **kwargs)
    elif name == 'amsgrad':
        return optimizers.Adam(l_rate, decay=decay, amsgrad=True, **kwargs)
    elif name == 'adamax':
        return optimizers.Adamax(l_rate, decay=decay, **kwargs)
    elif name == 'adabound':
        return Adabound(l_rate, decay=decay, **kwargs)
    elif name == 'amsbound':
        return Adabound(l_rate, decay=decay, amsgrad=True, **kwargs)
    elif name == 'nadam':
        return MNadam(l_rate, decay=decay, **kwargs)
    elif name == 'namsgrad':
        return MNadam(l_rate, decay=decay, amsgrad=True, **kwargs)
    elif name == 'nadabound':
        return Nadabound(l_rate, decay=decay, **kwargs)
    elif name == 'namsbound':
        return Nadabound(l_rate, decay=decay, amsgrad=True, **kwargs)
    elif name == 'adadelta':
        return optimizers.Adadelta(l_rate, decay=decay, **kwargs)
    elif name == 'rms':
        return optimizers.RMSprop(l_rate, decay=decay, **kwargs)
    elif name == 'adagrad':
        return optimizers.Adagrad(l_rate, decay=decay, **kwargs)
    elif name == 'adamw':
        if compat.COMPATIBLE_MODE['1.14']:
            raise ImportError(
                'This optimizer is not available in compatibility mode, '
                'because it requires the contrib library.')
        _raise_TF_warn()
        if decay != 0.0:
            logging.warning(
                'This optimizer uses \'decay\' as \'weight_decay\'.')
        else:
            raise ValueError('Should use \'decay\' > 0 for AdamW.')
        return weight_decay_optimizers.AdamWOptimizer(weight_decay=decay,
                                                      learning_rate=l_rate,
                                                      **kwargs)
    elif name == 'nmoment':
        return optimizers.SGD(lr=l_rate,
                              momentum=0.9,
                              decay=decay,
                              nesterov=True,
                              **kwargs)
    elif name == 'moment':
        return optimizers.SGD(lr=l_rate,
                              momentum=0.9,
                              decay=decay,
                              nesterov=False,
                              **kwargs)
    elif name == 'sgd':
        return optimizers.SGD(lr=l_rate, decay=decay, **kwargs)
    elif name == 'proximal':
        _raise_TF_warn()
        if decay != 0.0:
            logging.warning('This optimizer does not support \'decay\'.')
        return proximal_gradient_descent.ProximalGradientDescentOptimizer(
            l_rate, **kwargs)
    else:
        # Fail loudly instead of silently returning None for unknown names.
        raise ValueError('Unknown optimizer name: {0}'.format(name))
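A usage sketch of this helper, assuming a Keras model named model is already built:

opt = optimizer('adamax', l_rate=0.002)
model.compile(loss='categorical_crossentropy',
              optimizer=opt,
              metrics=['accuracy'])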
Example 6
    model.add(Dense(512, activation='relu'))
    model.add(Dense(num_roadsign_classes, activation='softmax'))
    model.summary()
    return model


# Load the names of the traffic signs
sign_names = read_csv('road_signs_names.csv', delimiter=',', dtype=None)

# Load the images and labels with the load_roadsigns_data() function
trainImages, trainLabels = load_roadsigns_data('./img')

# Display the traffic sign classes
# display_roadsigns_classes()

optimizer = optimizers.Adamax(lr=0.001)
model = build_model()

# Since this is a classification task, we use categorical_crossentropy
model.compile(loss='categorical_crossentropy',
              optimizer=optimizer,
              metrics=["accuracy"])  #,"mae"])
model.fit(trainImages,
          trainLabels,
          epochs=NUM_EPOCHS,
          batch_size=NUM_BATCHES,
          verbose=1,
          validation_split=0.2)

# Save the model
model.save('road_signs_model.h5')
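A minimal inference sketch for the saved model, assuming image is a single preprocessed array with the same shape as one training sample and that sign_names maps class indices to names:

import numpy as np
from keras.models import load_model

model = load_model('road_signs_model.h5')
# Add a batch dimension, predict, and look up the most likely class name.
pred = model.predict(np.expand_dims(image, axis=0))
print(sign_names[int(np.argmax(pred))])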