def __init__(self, dataset, *args, **kwargs):
    """Build a small CNN (two conv blocks + a dense head) for *dataset*.

    Two architecturally identical Sequential models are created:

    - ``self.model`` uses standard ``Dropout`` (inactive at inference).
    - ``self.probabilistic_model`` uses ``BayesianDropout`` so dropout
      stays active at prediction time (MC-dropout-style uncertainty).

    Args:
        dataset: provides ``input_shape`` and ``output_size``
            (project-defined; assumed a classification dataset since the
            head is softmax + categorical cross-entropy).
    """
    super(CNN, self).__init__(*args, **kwargs)

    def _build(dropout_layer):
        # Single builder for both variants; only the dropout layer
        # class differs, which removes ~25 duplicated `add` calls.
        net = Sequential()
        net.add(Conv2D(32, (3, 3), padding='same',
                       input_shape=dataset.input_shape))
        net.add(Activation('relu'))
        net.add(Conv2D(32, (3, 3)))
        net.add(Activation('relu'))
        net.add(MaxPooling2D(pool_size=(2, 2)))
        net.add(dropout_layer(0.25))
        net.add(Conv2D(64, (3, 3), padding='same'))
        net.add(Activation('relu'))
        net.add(Conv2D(64, (3, 3)))
        net.add(Activation('relu'))
        net.add(MaxPooling2D(pool_size=(2, 2)))
        net.add(dropout_layer(0.25))
        net.add(Flatten())
        net.add(Dense(512))
        net.add(Activation('relu'))
        net.add(dropout_layer(0.5))
        net.add(Dense(dataset.output_size))
        net.add(Activation('softmax'))
        # SGD with a small lr was the option kept after experiments
        # with rmsprop/adam (previously left as commented-out code).
        net.compile(loss='categorical_crossentropy',
                    optimizer=optimizers.sgd(lr=0.0001),
                    metrics=['accuracy'])
        return net

    self.model = _build(Dropout)
    self.probabilistic_model = _build(BayesianDropout)
def __init__(self, dataset, *args, **kwargs):
    """Build a VGG16-style network (trained from scratch) for *dataset*.

    Two architecturally identical Sequential models are created:

    - ``self.model`` uses standard ``Dropout``.
    - ``self.probabilistic_model`` uses ``BayesianDropout`` so dropout
      stays active at prediction time (MC-dropout-style uncertainty).

    Args:
        dataset: provides ``input_shape`` and ``output_size``
            (project-defined; softmax head implies classification).
    """
    super(VGGTOP, self).__init__(*args, **kwargs)

    # Channel plan per VGG variant; 'M' marks a 2x2 max-pool.
    # Only 'VGG16' is used below; the others are kept for reference.
    cfg = {
        'VGG11': [64, 'M', 128, 'M', 256, 256, 'M',
                  512, 512, 'M', 512, 512, 'M'],
        'VGG13': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M',
                  512, 512, 'M', 512, 512, 'M'],
        'VGG16': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M',
                  512, 512, 512, 'M', 512, 512, 512, 'M'],
        'VGG19': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M',
                  512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
    }
    weight_decay = 0.0005  # L2 penalty on every conv/dense kernel

    def _build(dropout_layer):
        # Single builder for both variants; only the dropout layer
        # class differs, which removes the full duplicated build.
        net = Sequential()
        # First conv is added separately to carry input_shape.
        # NOTE(review): it has no explicit Activation/BatchNorm, unlike
        # the convs added in the loop — confirm this is intentional.
        net.add(Conv2D(64, (3, 3), padding='same',
                       input_shape=dataset.input_shape,
                       kernel_regularizer=l2(weight_decay)))
        for x in cfg['VGG16'][1:]:
            if x == 'M':
                net.add(MaxPooling2D(pool_size=(2, 2)))
            else:
                net.add(Conv2D(x, (3, 3), padding='same',
                               kernel_regularizer=l2(weight_decay)))
                net.add(Activation('relu'))
                net.add(BatchNormalization())
        net.add(dropout_layer(0.25))
        net.add(Flatten())
        net.add(Dense(512, kernel_regularizer=l2(weight_decay)))
        net.add(Activation('relu'))
        net.add(BatchNormalization())
        net.add(dropout_layer(0.5))
        net.add(Dense(dataset.output_size))
        net.add(Activation('softmax'))
        net.compile(loss='categorical_crossentropy',
                    optimizer=optimizers.Adam(),
                    metrics=['accuracy'])
        return net

    self.model = _build(Dropout)
    self.probabilistic_model = _build(BayesianDropout)
def __init__(self, dataset, layers=None, dropout=0.5, *args, **kwargs):
    """Build a fully-connected MLP (and its MC-dropout twin) for *dataset*.

    - ``self.model`` uses standard ``Dropout``.
    - ``self.probabilistic_model`` uses ``BayesianDropout`` so dropout
      stays active at prediction time.

    Args:
        dataset: provides ``input_shape``, ``output_size``, ``x_train``
            and ``type`` ('classification' selects a softmax head with
            categorical cross-entropy; otherwise a Gaussian NLL loss).
        layers: hidden-layer widths; ``None`` means ``[50]``. (The
            previous mutable default ``layers=[50]`` was a shared-list
            bug; a ``None`` sentinel preserves the same behavior.)
        dropout: dropout rate applied after every hidden Dense layer.
    """
    super(MLP, self).__init__(*args, **kwargs)
    if layers is None:
        layers = [50]
    self.dropout = dropout

    tau = 0.159707652696  # obtained from BO
    lengthscale = 1e-2
    # NOTE(review): `reg` looks intended as a weight-decay strength for
    # MC-dropout (Gal & Ghahramani), but it is never applied to any
    # layer below — confirm whether a kernel_regularizer was meant.
    reg = lengthscale**2 * (1 - dropout) / (2. * len(dataset.x_train) * tau)

    def _build(dropout_layer):
        # Single builder for both variants; only the dropout class differs.
        net = Sequential()
        net.add(Dense(layers[0], input_shape=dataset.input_shape))
        net.add(dropout_layer(dropout))
        net.add(Activation('relu'))
        for units in layers[1:]:
            net.add(Dense(units))
            net.add(dropout_layer(dropout))
            net.add(Activation('relu'))
        net.add(Dense(dataset.output_size))
        return net

    model = _build(Dropout)
    probabilistic_model = _build(BayesianDropout)

    opt = optimizers.Adam()
    if dataset.type == 'classification':
        model.add(Activation('softmax'))
        probabilistic_model.add(Activation('softmax'))
        compile_params = {
            'loss': 'categorical_crossentropy',
            'optimizer': opt,
            'metrics': ['accuracy'],
        }
    else:
        # Regression: linear output, Gaussian negative log-likelihood.
        compile_params = {
            'loss': nll_gaussian,
            'optimizer': opt,
        }
    model.compile(**compile_params)
    probabilistic_model.compile(**compile_params)
    self.model = model
    self.probabilistic_model = probabilistic_model
def __init__(self, input_shape, num_classes, epochs=10, batch_size=32):
    """Build a small CNN classifier (two conv blocks + dense head).

    Two architecturally identical Sequential models are created:

    - ``self.model`` uses standard ``Dropout``.
    - ``self.probabilistic_model`` uses ``BayesianDropout`` so dropout
      stays active at prediction time (MC-dropout-style uncertainty).

    Args:
        input_shape: input tensor shape passed to the first Conv2D.
        num_classes: size of the softmax output layer.
        epochs: stored for later training use (not used here).
        batch_size: stored for later training use (not used here).
    """
    self.input_shape = input_shape
    self.num_classes = num_classes
    self.epochs = epochs
    self.batch_size = batch_size

    def _build(dropout_layer):
        # Single builder for both variants; only the dropout layer
        # class differs, which removes ~25 duplicated `add` calls.
        net = Sequential()
        net.add(Conv2D(32, (3, 3), padding='same',
                       input_shape=input_shape))
        net.add(Activation('relu'))
        net.add(Conv2D(32, (3, 3)))
        net.add(Activation('relu'))
        net.add(MaxPooling2D(pool_size=(2, 2)))
        net.add(dropout_layer(0.25))
        net.add(Conv2D(64, (3, 3), padding='same'))
        net.add(Activation('relu'))
        net.add(Conv2D(64, (3, 3)))
        net.add(Activation('relu'))
        net.add(MaxPooling2D(pool_size=(2, 2)))
        net.add(dropout_layer(0.25))
        net.add(Flatten())
        net.add(Dense(512))
        net.add(Activation('relu'))
        net.add(dropout_layer(0.5))
        net.add(Dense(num_classes))
        net.add(Activation('softmax'))
        net.compile(loss='categorical_crossentropy',
                    optimizer=optimizers.rmsprop(lr=0.0001, decay=1e-6),
                    metrics=['accuracy'])
        return net

    self.model = _build(Dropout)
    self.probabilistic_model = _build(BayesianDropout)
def __init__(self, dataset, *args, **kwargs):
    """Build a VGG16 backbone (no top) with a custom dense head.

    Two architecturally identical functional models are created:

    - ``self.model`` uses standard ``Dropout``.
    - ``self.probabilistic_model`` uses ``BayesianDropout`` so dropout
      stays active at prediction time (MC-dropout-style uncertainty).

    Both compile with the 'adam' optimizer. NOTE(review): the original
    code also constructed ``optimizers.sgd(lr=0.0001)`` twice but never
    passed it to ``compile`` — that dead code is removed here; runtime
    behavior is unchanged.

    Args:
        dataset: provides ``input_shape`` and ``output_size``
            (project-defined; softmax head implies classification).
    """
    super(VGG, self).__init__(*args, **kwargs)

    def _build(dropout_layer):
        # Single builder for both variants; only the dropout class differs.
        base = VGG16(include_top=False, input_shape=dataset.input_shape)
        x = Flatten(name='flatten')(base.output)
        x = Dense(512, activation='relu', name='fc1')(x)
        x = dropout_layer(0.5)(x)
        x = Dense(dataset.output_size, activation='softmax',
                  name='predictions')(x)
        net = Model(inputs=base.input, outputs=x)
        net.compile(loss='categorical_crossentropy',
                    optimizer='adam',
                    metrics=['accuracy'])
        return net

    self.model = _build(Dropout)
    self.probabilistic_model = _build(BayesianDropout)