from keras.models import Sequential
from keras.layers import Activation, Dense, Dropout, Flatten
# LabelFlipNoise and OutlierNoise are assumed to be the project's custom noise
# layers (not part of Keras itself).


def init(self, lf=False, ol=False):
    """Build the Sequential model and store it on the instance.

    lf: append a LabelFlipNoise layer (only if labelflip_decay is configured).
    ol: append an OutlierNoise layer (only if outlier_alpha is configured).
    """
    model = Sequential()
    input_done = False
    if self.flatten:
        model.add(Flatten(name="flatten", input_shape=self.in_shape))
        input_done = True
    for i, h in enumerate(self.hiddens):
        if not input_done:
            # First layer of the network needs the input shape.
            model.add(Dense(h.neurons, activation='relu',
                            name="additional_hidden_" + str(i),
                            input_shape=self.in_shape))
            input_done = True
        else:
            model.add(Dense(h.neurons, activation='relu',
                            name="additional_hidden_" + str(i)))
        if h.dropout > 0:
            model.add(Dropout(h.dropout, name="additional_dropout_" + str(i)))
    if not input_done:
        model.add(Dense(self.out_shape, name="dense", input_shape=self.in_shape))
    else:
        model.add(Dense(self.out_shape, name="dense"))
    model.add(Activation("softmax", name="softmax"))
    if lf and self.labelflip_decay is not None:
        model.add(LabelFlipNoise(weight_decay=self.labelflip_decay, trainable=True))
    if ol and self.outlier_alpha is not None:
        model.add(OutlierNoise(alpha=self.outlier_alpha))
    self._model = model
    return self
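# A minimal sketch of the state `init` expects on `self`. The class name
# `ModelBuilder` and this constructor are hypothetical; only the attribute
# names come from the method above. In the real code, `init` would be defined
# as a method of this class. `Hidden` is assumed to be a simple container
# exposing `neurons` and `dropout`, matching how the loop reads `h.neurons`
# and `h.dropout`.
from collections import namedtuple

Hidden = namedtuple("Hidden", ["neurons", "dropout"])


class ModelBuilder(object):
    def __init__(self, in_shape, out_shape, hiddens=(), flatten=True,
                 labelflip_decay=None, outlier_alpha=None):
        self.in_shape = in_shape                # shape of one input sample
        self.out_shape = out_shape              # number of output classes
        self.hiddens = list(hiddens)            # Hidden(neurons, dropout) entries
        self.flatten = flatten                  # prepend a Flatten layer?
        self.labelflip_decay = labelflip_decay  # weight decay for LabelFlipNoise
        self.outlier_alpha = outlier_alpha      # alpha for OutlierNoise
        self._model = None

# Example call (placeholder values):
# builder = ModelBuilder(in_shape=(28, 28), out_shape=10,
#                        hiddens=[Hidden(neurons=128, dropout=0.25)],
#                        labelflip_decay=0.1)
# model = builder.init(lf=True)._model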
def new_model(in_shape, out_shape, hiddens=[], lf=False, lf_decay=0.1):
    # type: (tuple, int, List[Hidden], bool, float) -> Sequential
    """Standalone builder: Flatten -> hidden Dense/Dropout blocks -> softmax,
    optionally followed by a LabelFlipNoise layer."""
    model = Sequential()
    model.add(Flatten(name="flatten", input_shape=in_shape))
    for i, h in enumerate(hiddens):
        model.add(Dense(h.neurons, activation='relu',
                        name="additional_hidden_" + str(i)))
        if h.dropout > 0:
            model.add(Dropout(h.dropout, name="additional_dropout_" + str(i)))
    model.add(Dense(out_shape, name="dense"))
    model.add(Activation("softmax", name="softmax"))
    if lf:
        model.add(LabelFlipNoise(weight_decay=lf_decay, trainable=True))
    return model
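# Hedged usage sketch for `new_model`; shapes, class count, and hyperparameters
# below are placeholders rather than values taken from the original code.
# hidden = Hidden(neurons=128, dropout=0.25)
# model = new_model(in_shape=(28, 28), out_shape=10, hiddens=[hidden],
#                   lf=True, lf_decay=0.1)
# model.compile(optimizer="adam", loss="categorical_crossentropy",
#               metrics=["accuracy"])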
def on_train_begin(self, logs=None):
    # Keras assigns `self.model` to the callback before training starts, so do
    # not overwrite it here; just make any LabelFlipNoise layers trainable once
    # training begins.
    for layer in self.model.layers:
        if isinstance(layer, LabelFlipNoise):
            layer.trainable = True
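# A sketch of how this hook would typically be packaged, assuming it belongs to
# a keras.callbacks.Callback subclass; the class name `EnableLabelFlip` is
# hypothetical and simply wraps the method above.
from keras.callbacks import Callback


class EnableLabelFlip(Callback):
    def on_train_begin(self, logs=None):
        # Enable training of the noise layer(s) at the start of fit().
        for layer in self.model.layers:
            if isinstance(layer, LabelFlipNoise):
                layer.trainable = True

# model.fit(x_train, y_train, callbacks=[EnableLabelFlip()])  # x_train/y_train are placeholders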