def load_verbatimnet( layer, params='/fileserver/iam/iam-processed/models/fiel_1k.hdf5' ): print "Establishing Fiel's verbatim network" vnet = verbatimnet(layer) loadparams( vnet, params ) vnet.compile( loss='mse', optimizer='sgd' ) print "Compiled neural network up to FC7 layer" return vnet
def load_verbatimnet( layer, input_shape=(1,56,56), params='/fileserver/iam/iam-processed/models/fiel_1k.hdf5' ): print "Establishing Fiel's verbatim network" vnet = verbatimnet(layer=layer, input_shape=input_shape) loadparams( vnet, params ) vnet.compile( loss='mse', optimizer='sgd' ) print "Compiled neural network up to FC7 layer" return vnet
# NOTE(review): this chunk was collapsed onto a single physical line by the
# extractor, and the leading '#' therefore turns the ENTIRE line into one
# comment. It appears to contain: the script's parameter definitions
# (num_authors from the h5py label file, shingle_dim, batch/load sizes, lr),
# construction of the Fiel verbatim net with a Dense+softmax author-ID head
# compiled with SGD + categorical cross-entropy, and the start of an
# Hdf5MiniBatcher / IAM_MiniBatcher selection. The final IAM_MiniBatcher call
# is CUT OFF mid-argument-list at the chunk boundary, so the line is kept
# byte-identical below rather than reconstructed by guesswork.
# ### Parameter Definitions labels = h5py.File(hdf5authors, 'r') num_authors=len(labels) num_forms_per_author=-1 shingle_dim=(120,120) batch_size=32 load_size=batch_size*1000 iterations = 1000 lr = 0.001 ### Define your model # Here, we're using the Fiel Network # vnet = load_verbatimnet( 'fc7', paramsfile=paramsfile, compiling=False ) vnet = verbatimnet( layer='fc7', input_shape=(1,)+shingle_dim, compiling=False ) vnet.add(Dense(num_authors)) vnet.add(Activation('softmax')) sgd = SGD(lr=lr, decay=1e-6, momentum=0.9, nesterov=True) vnet.compile(loss='categorical_crossentropy', optimizer=sgd) print "Finished compilation" # ### Minibatcher (to load in your data for each batch) if False: mini_m = Hdf5MiniBatcher(hdf5authors, num_authors, num_forms_per_author, shingle_dim=shingle_dim, default_mode=MiniBatcher.TRAIN, batch_size=batch_size, add_rotation=True) else: mini_m = IAM_MiniBatcher(hdf5authors, num_authors, num_forms_per_author, shingle_dim=shingle_dim, default_mode=MiniBatcher.TRAIN,
# NOTE(review): same situation as the variant above -- one collapsed physical
# line whose leading '#' makes the whole line a comment. It appears to be a
# re-spaced copy of the same script: parameter definitions, the verbatim net
# with a Dense(num_authors)+softmax head compiled with SGD + categorical
# cross-entropy, and the start of the Hdf5MiniBatcher branch. The
# Hdf5MiniBatcher call is CUT OFF mid-argument-list at the chunk boundary
# (ends at 'batch_size=batch_size,'), so the line is preserved byte-identical
# rather than reconstructed.
# ### Parameter Definitions labels = h5py.File(hdf5authors, 'r') num_authors = len(labels) num_forms_per_author = -1 shingle_dim = (120, 120) batch_size = 32 load_size = batch_size * 1000 iterations = 1000 lr = 0.001 ### Define your model # Here, we're using the Fiel Network # vnet = load_verbatimnet( 'fc7', paramsfile=paramsfile, compiling=False ) vnet = verbatimnet(layer='fc7', input_shape=(1, ) + shingle_dim, compiling=False) vnet.add(Dense(num_authors)) vnet.add(Activation('softmax')) sgd = SGD(lr=lr, decay=1e-6, momentum=0.9, nesterov=True) vnet.compile(loss='categorical_crossentropy', optimizer=sgd) print "Finished compilation" # ### Minibatcher (to load in your data for each batch) if False: mini_m = Hdf5MiniBatcher(hdf5authors, num_authors, num_forms_per_author, shingle_dim=shingle_dim, default_mode=MiniBatcher.TRAIN, batch_size=batch_size,