Example #1
import neuraLint

from keras.applications.resnet50 import ResNet50
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.optimizers import Adam

# Assumed input size; the original project defines IMG_WIDTH and IMG_HEIGHT elsewhere.
IMG_WIDTH, IMG_HEIGHT = 224, 224


def training():
    # train_generator, validation_generator = prepare_data()
    # train_generator, validation_generator, test_generator = prepare_data()
    conv_network = ResNet50(include_top=False,
                            weights='imagenet',
                            input_shape=(IMG_WIDTH, IMG_HEIGHT, 3))
    # Freeze the ResNet50 base except for its last three layers.
    for layer in conv_network.layers[:-3]:
        layer.trainable = False

    model = Sequential()
    model.add(conv_network)
    # model.add(AveragePooling2D((7, 7), name='avg_pool'))
    model.add(Flatten())
    model.add(Dense(1024, activation='relu'))
    model.add(Dropout(0.5))
    model.add(Dense(10, activation='softmax'))
    # model.summary()
    model.compile(loss='categorical_crossentropy',
                  optimizer=Adam(lr=0.001),
                  metrics=['accuracy'])

    print(neuraLint.check(model))
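
The prepare_data() helper referenced in the commented-out lines above is not part of this example. A minimal sketch of the two-generator variant, assuming a directory-per-class image layout and Keras' ImageDataGenerator (the train_dir and val_dir paths are placeholders), could look like this:

from keras.preprocessing.image import ImageDataGenerator

def prepare_data(train_dir='data/train', val_dir='data/val', batch_size=32):
    # Rescale pixel values and stream images from class-labelled folders.
    datagen = ImageDataGenerator(rescale=1. / 255)
    train_generator = datagen.flow_from_directory(
        train_dir,
        target_size=(IMG_WIDTH, IMG_HEIGHT),
        batch_size=batch_size,
        class_mode='categorical')
    validation_generator = datagen.flow_from_directory(
        val_dir,
        target_size=(IMG_WIDTH, IMG_HEIGHT),
        batch_size=batch_size,
        class_mode='categorical')
    return train_generator, validation_generator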
Example #2
import neuraLint

from keras.layers import Dense
from keras.models import Sequential

# Small fully connected regression network: three ReLU hidden layers and a linear output.
model = Sequential()
model.add(Dense(10, input_dim=1, activation='relu'))
model.add(Dense(10, activation='relu'))
model.add(Dense(10, activation='relu'))
model.add(Dense(1))
model.compile(loss='mse', optimizer='adam')
print(neuraLint.check(model))
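
The model above is only compiled and checked, never trained. A minimal sketch of fitting it on synthetic one-dimensional regression data (the NumPy data below is made up purely for illustration) might be:

import numpy as np

# Toy regression data: y = 2x plus a little noise.
x_train = np.random.rand(1000, 1)
y_train = 2 * x_train + 0.1 * np.random.randn(1000, 1)

model.fit(x_train, y_train, epochs=10, batch_size=32, verbose=0)
print(model.evaluate(x_train, y_train, verbose=0))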
Example #3
import neuraLint

from keras.models import Model

# Fragment of a MobileNet-style backbone; img_input, alpha, depth_multiplier,
# _depthwise_conv_block and the earlier blocks are defined earlier in the original file.
x = _depthwise_conv_block(x,
                          256,
                          alpha,
                          depth_multiplier,
                          strides=(2, 2),
                          block_id=4)
x = _depthwise_conv_block(x, 256, alpha, depth_multiplier, block_id=5)
x = _depthwise_conv_block(x,
                          512,
                          alpha,
                          depth_multiplier,
                          strides=(2, 2),
                          block_id=6)
x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, block_id=7)
x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, block_id=8)
x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, block_id=9)
x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, block_id=10)
x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, block_id=11)
x = _depthwise_conv_block(x,
                          1024,
                          alpha,
                          depth_multiplier,
                          strides=(2, 2),
                          block_id=12)
x = _depthwise_conv_block(x, 1024, alpha, depth_multiplier, block_id=13)

model = Model(img_input, x)
model.compile(optimizer='rmsprop',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

print(neuraLint.check(model))
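
The _depthwise_conv_block helper is not shown in this excerpt. A sketch of the block as it typically appears in MobileNet-style Keras code (a 3x3 depthwise convolution followed by a 1x1 pointwise convolution, each with batch normalization and a ReLU activation; the reference implementation uses ReLU6 and slightly different padding) is roughly:

from keras.layers import Activation, BatchNormalization, Conv2D, DepthwiseConv2D

def _depthwise_conv_block(inputs, pointwise_filters, alpha, depth_multiplier=1,
                          strides=(1, 1), block_id=1):
    pointwise_filters = int(pointwise_filters * alpha)
    # 3x3 depthwise convolution: one filter per input channel.
    x = DepthwiseConv2D((3, 3), padding='same', depth_multiplier=depth_multiplier,
                        strides=strides, use_bias=False,
                        name='conv_dw_%d' % block_id)(inputs)
    x = BatchNormalization(name='conv_dw_%d_bn' % block_id)(x)
    x = Activation('relu', name='conv_dw_%d_relu' % block_id)(x)
    # 1x1 pointwise convolution that sets the output channel count.
    x = Conv2D(pointwise_filters, (1, 1), padding='same', strides=(1, 1),
               use_bias=False, name='conv_pw_%d' % block_id)(x)
    x = BatchNormalization(name='conv_pw_%d_bn' % block_id)(x)
    return Activation('relu', name='conv_pw_%d_relu' % block_id)(x)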
Example #4
        pooling_regions = 7
        input_shape = (num_rois, 512, 7, 7)

    out_roi_pool = roiPoolingConv.RoiPoolingConv(pooling_regions, num_rois)([base_layers, input_rois])

    out = TimeDistributed(Flatten(name='flatten'))(out_roi_pool)
    out = TimeDistributed(Dense(4096, activation='relu', name='fc1'))(out)
    out = TimeDistributed(Dropout(0.5))(out)
    out = TimeDistributed(Dense(4096, activation='relu', name='fc2'))(out)
    out = TimeDistributed(Dropout(0.5))(out)

    out_class = TimeDistributed(Dense(nb_classes, activation='softmax', kernel_initializer='zero'),
                                name='dense_class_{}'.format(nb_classes))(out)
    # note: no regression target for bg class
    out_regr = TimeDistributed(Dense(4 * (nb_classes - 1), activation='linear', kernel_initializer='zero'),
                               name='dense_regress_{}'.format(nb_classes))(out)

    return [out_class, out_regr]


img_input = Input(shape=(100, 100, 3))
roi_input = Input(shape=(None, 4))
shared_layers = nn_base(img_input, trainable=True)

# define the RPN, built on the base layers
rpn_outputs = rpn(shared_layers, 60)
# classifier = classifier(shared_layers, roi_input, 2, 5, trainable=True)

rpn_model = Model(img_input, rpn_outputs[:2])
print(neuraLint.check(rpn_model))
# model.compile(optimizer='adam', loss=dice_coef_loss, metrics=[dice_coef])
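
The nn_base and rpn helpers come from the surrounding Faster R-CNN-style project and are not shown in this excerpt. A minimal sketch of a typical region proposal head that returns [class_output, regression_output, base_layers] (which is why only rpn_outputs[:2] is passed to Model) might be:

from keras.layers import Conv2D

def rpn(base_layers, num_anchors):
    # Shared 3x3 convolution over the backbone feature map.
    x = Conv2D(512, (3, 3), padding='same', activation='relu', name='rpn_conv1')(base_layers)
    # Per-anchor objectness score and box regression at every spatial position.
    x_class = Conv2D(num_anchors, (1, 1), activation='sigmoid', name='rpn_out_class')(x)
    x_regr = Conv2D(num_anchors * 4, (1, 1), activation='linear', name='rpn_out_regress')(x)
    return [x_class, x_regr, base_layers]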
Example #5
model.add(Conv2D(256, (3, 3), padding='same'))
model.add(Dropout(0.2))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2), strides=2))

model.add(Conv2D(256, (3, 3), padding='same'))
model.add(Dropout(0.25))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2), strides=2))

model.add(Conv2D(256, (3, 3), padding='same'))
model.add(Dropout(0.25))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2), strides=2))

model.add(Flatten())
model.add(Dense(256))
model.add(BatchNormalization())
model.add(Activation('relu'))

model.add(Dropout(0.8))
model.add(Dense(3))
model.add(Activation('softmax'))

model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])

report = neuraLint.check(model)
print(report)
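
The excerpt begins after the model's first layers. A minimal opening compatible with the blocks above, in which the input size and first filter count are assumptions rather than values from the original code, could be:

import neuraLint

from keras.models import Sequential
from keras.layers import (Activation, BatchNormalization, Conv2D, Dense,
                          Dropout, Flatten, MaxPooling2D)

model = Sequential()
# Input shape and filter count are assumed; the original values are not in the excerpt.
model.add(Conv2D(64, (3, 3), padding='same', input_shape=(64, 64, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2), strides=2))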