        # Tail of the residual Block's forward pass (the method header and the
        # earlier layers are not shown in this fragment).
        h2 = self.bnorm3(self.conv2(h1), finetune=test)
        # Identity shortcut: concatenate zero padding onto x so its channel
        # count matches h2 before the addition.
        pad_x = F.concat((x, U.zero_pad(x, self.inch, self.outch)))
        h3 = h2 + pad_x
        return h3


class ResidualNN(C.Classifier):
    layer_num = [32, 32, 64, 128, 256]

    def __init__(self, initializer, layer_num=layer_num):
        super().__init__(
            chain.Res4Chain3(initializer, Block, layer_num, initialBN=True))


export_dir = os.path.join(os.getcwd(), "save")
filename, ext = os.path.splitext(__file__)
dataset_dir = os.path.join(os.getcwd(), "../dataset")

train = Trainer(filename, export_dir, dataset_dir)
train.set_networks(ResidualNN, I.HeNormal)
train.model_init()

# Piecewise-constant learning-rate schedule: epochs at which the rate changes,
# paired with the new learning-rate values.
lr_update = [[0, 200, 350, 500], [0.3, 0.1, 0.01, 0.001]]
train.train_loop(lr_update=lr_update)
train.testing()
train.export()
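# Sketch (assumption): Trainer.train_loop above is project code that is not shown,
# so how it consumes lr_update is not visible here. The helper below is a minimal
# illustration of applying a schedule in that format (paired lists of epoch
# boundaries and learning-rate values) to a Chainer optimizer that exposes an `lr`
# attribute, such as chainer.optimizers.MomentumSGD. The function name is
# hypothetical, not part of the project.
def apply_lr_schedule(optimizer, epoch, lr_update):
    """Set optimizer.lr to the value of the latest schedule step reached by epoch."""
    boundaries, values = lr_update
    for boundary, value in zip(boundaries, values):
        if epoch >= boundary:
            optimizer.lr = value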
from pathlib import Path

from keras.optimizers import SGD

from Core import Trainer
from NN.conv import LeNet


def get_model(args):
    # initialize the model and the model_filename
    model = None
    print("[INFO] compiling model...")
    # build(width, height, depth, classes, nlf='relu')
    model = LeNet.build(32, 32, 3, 3)
    model.compile(
        loss="categorical_crossentropy",
        optimizer=SGD(lr=0.005),
        metrics=["accuracy"]
    )
    model_filename = str(Path(args['model'], 'lenet.h5').expanduser())
    return model, model_filename


trainer = Trainer()
model, model_filename = get_model(trainer.args)
trainer.execute(model)
model.save(model_filename)
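# Sketch (assumption): NN.conv.LeNet is project code that is not shown. Based on the
# signature noted above, build(width, height, depth, classes, nlf='relu'), a minimal
# LeNet-style implementation compatible with the call LeNet.build(32, 32, 3, 3) could
# look like the following; the filter and unit counts are illustrative, not the
# project's actual values.
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Activation, Flatten, Dense

class LeNet:
    @staticmethod
    def build(width, height, depth, classes, nlf="relu"):
        model = Sequential()
        # First CONV => activation => POOL block.
        model.add(Conv2D(20, (5, 5), padding="same",
                         input_shape=(height, width, depth)))
        model.add(Activation(nlf))
        model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
        # Second CONV => activation => POOL block.
        model.add(Conv2D(50, (5, 5), padding="same"))
        model.add(Activation(nlf))
        model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
        # Fully connected head ending in a softmax over the classes.
        model.add(Flatten())
        model.add(Dense(500))
        model.add(Activation(nlf))
        model.add(Dense(classes))
        model.add(Activation("softmax"))
        return model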