def model(th):
  # Convolutional network (AlexNet-like filter schedule 64-192-384-256-256):
  # two conv/pool stages, three further conv layers, then two FC layers with
  # optional dropout.
  assert isinstance(th, m.Config)
  model = m.get_container(th, flatten=False)

  model.add(m.Conv2D(64, kernel_size=3, strides=1))
  model.add(m.Activation('relu'))
  model.add(m.MaxPool2D(2, strides=2))

  model.add(m.Conv2D(192, kernel_size=3, strides=1))
  model.add(m.Activation('relu'))
  model.add(m.MaxPool2D(2, strides=2))

  for filters in (384, 256, 256):
    model.add(m.Conv2D(filters, kernel_size=3))
    model.add(m.Activation('relu'))
  model.add(m.MaxPool2D(3, strides=2))

  model.add(m.Flatten())
  for dim in (2048, 2048):
    if th.dropout > 0:
      model.add(m.Dropout(1. - th.dropout))
    model.add(m.Linear(dim))
    model.add(m.Activation('relu'))

  return m.finalize(th, model)
def model(th):
  # Highway network: a dense input layer followed by a stack of highway layers.
  assert isinstance(th, m.Config)
  model = m.get_container(th, flatten=True)

  model.add(Dense(th.layer_width, th.spatial_activation))
  model.add(Highway(
    th.layer_width, th.num_layers, th.spatial_activation,
    t_bias_initializer=th.bias_initializer))

  # model.register_extractor(m.LinearHighway.extractor)
  return m.finalize(th, model)
def model(th):
  # Multi-layer perceptron: linear layers with optional batch-norm and dropout,
  # widths taken from th.fc_dims.
  assert isinstance(th, m.Config)
  model = m.get_container(th, flatten=True)

  for dim in th.fc_dims:
    model.add(m.Linear(dim))
    if th.use_batchnorm:
      model.add(m.BatchNormalization())
    model.add(m.Activation(th.spatial_activation))
    if th.dropout > 0:
      model.add(m.Dropout(train_keep_prob=1. - th.dropout))

  return m.finalize(th, model)
def model(th):
  # Highway variant: a dense input layer followed by an SLHighway block
  # configured by the group string.
  assert isinstance(th, m.Config)
  model = m.get_container(th, flatten=True)

  model.add(Dense(th.layer_width, th.spatial_activation))
  model.add(SLHighway(
    config_string=th.group_string,
    num_layers=th.num_layers,
    head_size=th.head_size,
    activation=th.spatial_activation,
    gutter=th.gutter,
    gutter_bias=th.gutter_bias,
  ))

  # model.register_extractor(m.LinearHighway.extractor)
  return m.finalize(th, model)
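# -----------------------------------------------------------------------------
# The four builders above share one pattern: assert the hyperparameter config,
# request a layer container, stack layers driven entirely by config fields, and
# return the finalized container. The sketch below illustrates that pattern in
# plain, framework-free Python; DemoConfig, DemoContainer and build_mlp are
# hypothetical stand-ins for m.Config / m.get_container, not the library's API.
# -----------------------------------------------------------------------------
from dataclasses import dataclass


@dataclass
class DemoConfig:
  # Hypothetical stand-in for m.Config, holding the fields the MLP builder reads.
  fc_dims: tuple = (256, 128)
  spatial_activation: str = 'relu'
  use_batchnorm: bool = False
  dropout: float = 0.0


class DemoContainer:
  # Hypothetical stand-in for the layer container; just records added layers.
  def __init__(self):
    self.layers = []

  def add(self, layer):
    self.layers.append(layer)


def build_mlp(th):
  # Mirrors the MLP builder: Linear -> (BatchNorm) -> Activation -> (Dropout).
  assert isinstance(th, DemoConfig)
  net = DemoContainer()
  for dim in th.fc_dims:
    net.add(('Linear', dim))
    if th.use_batchnorm:
      net.add(('BatchNorm',))
    net.add(('Activation', th.spatial_activation))
    if th.dropout > 0:
      net.add(('Dropout', 1. - th.dropout))
  return net


if __name__ == '__main__':
  # Example: a 512-256 MLP with 10% dropout, printed layer by layer.
  for layer in build_mlp(DemoConfig(fc_dims=(512, 256), dropout=0.1)).layers:
    print(layer)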