def pet(memory, hidden_dim, order, learning_rate, mark='pet'):
    """Build a polynomial-expansion-type predictor.

    A bias-free linear layer feeds a sum-merged bank of Homogeneous
    layers of orders 1..`order`.

    :param memory: memory depth of the predictor input
    :param hidden_dim: output dimension of the linear layer
    :param order: highest homogeneous order to include
    :param learning_rate: learning rate used by default_build
    :param mark: model mark (identifier)
    :return: a built NeuralNet model
    """
    # Instantiate the predictor wrapper
    model = NeuralNet(memory_depth=memory, mark=mark)
    net = model.nn
    assert isinstance(net, Predictor)

    # Stack the layers
    net.add(Input([memory]))
    net.add(Linear(output_dim=hidden_dim, use_bias=False))
    # Open a sum-merged container, then add one homogeneous term per order
    net.add(inter_type=pedia.sum)
    for degree in range(1, order + 1):
        net.add_to_last_net(Homogeneous(order=degree))

    # Build and return
    model.default_build(learning_rate=learning_rate)
    return model
def rnn0(th):
    """Build a recurrent predictor from a hyper-parameter hub.

    Stacks `th.num_blocks` BasicRNNCell layers of size `th.hidden_dim`
    on top of the input, followed by a scalar linear output.

    :param th: an NlsHub holding memory_depth, mark, num_blocks,
               hidden_dim and learning_rate
    :return: a built NeuralNet model wrapping a recurrent Predictor
    """
    assert isinstance(th, NlsHub)

    # PEP 8 (E731): use a def instead of assigning a lambda to a name
    def nn_class(mark):
        return Predictor(mark=mark, net_type=Recurrent)

    # Instantiate the recurrent model
    model = NeuralNet(th.memory_depth, mark=th.mark, nn_class=nn_class)
    nn = model.nn
    assert isinstance(nn, Predictor)

    # Add layers
    nn.add(Input(sample_shape=[th.memory_depth]))
    for _ in range(th.num_blocks):
        nn.add(BasicRNNCell(state_size=th.hidden_dim))
    nn.add(Linear(output_dim=1))

    # Build and return
    model.default_build(th.learning_rate)
    return model
def tlp(memory_depth, hidden_dim, mark='tlp'):
    """Build a two-layer perceptron predictor.

    Linear -> sigmoid -> bias-free linear scalar output, built with a
    fixed learning rate of 0.001.

    :param memory_depth: memory depth of the predictor input
    :param hidden_dim: hidden layer width
    :param mark: model mark (identifier)
    :return: a built NeuralNet model
    """
    # Fixed hyper-parameter
    learning_rate = 0.001

    # Instantiate the predictor wrapper
    model = NeuralNet(memory_depth, mark=mark)
    net = model.nn
    assert isinstance(net, Predictor)

    # Stack the layers
    net.add(Input([memory_depth]))
    net.add(Linear(output_dim=hidden_dim))
    net.add(Activation('sigmoid'))
    net.add(Linear(output_dim=1, use_bias=False))

    # Build and return
    model.default_build(learning_rate=learning_rate)
    return model
def mlp_01(mark, memory_depth, layer_dim, learning_rate, activation='relu'):
    """Build a single-hidden-layer MLP predictor.

    Linear -> activation -> scalar linear output.

    :param mark: model mark (identifier)
    :param memory_depth: memory depth of the predictor input
    :param layer_dim: hidden layer width
    :param learning_rate: learning rate used by default_build
    :param activation: activation function name, default 'relu'
    :return: a built NeuralNet model
    """
    # (Removed a dead `pass` placeholder under a stale "Configurations"
    #  comment — it had no effect.)

    # Initiate a predictor
    model = NeuralNet(memory_depth, mark=mark)
    nn = model.nn
    assert isinstance(nn, Predictor)

    # Add layers
    nn.add(Input([memory_depth]))
    nn.add(Linear(output_dim=layer_dim))
    nn.add(Activation(activation))
    nn.add(Linear(output_dim=1))

    # Build model
    model.default_build(learning_rate)

    # Return model
    return model
def svn(memory_depth, order, hidden_dim, mark='svn'):
    """Build a polynomial (SVN) predictor.

    Linear -> Polynomial(order) -> bias-free scalar linear output,
    trained with Adam at a fixed learning rate of 0.001.

    :param memory_depth: memory depth of the predictor input
    :param order: order of the Polynomial layer
    :param hidden_dim: hidden layer width
    :param mark: model mark (identifier)
    :return: a built NeuralNet model
    """
    # Fixed hyper-parameter
    learning_rate = 0.001

    # Initiate a predictor
    model = NeuralNet(memory_depth, mark=mark)
    nn = model.nn
    assert isinstance(nn, Predictor)

    # Add layers
    nn.add(Input([memory_depth]))
    nn.add(Linear(output_dim=hidden_dim))
    nn.add(Polynomial(order=order))
    nn.add(Linear(output_dim=1, use_bias=False))

    # Build model (removed commented-out GradientDescentOptimizer line)
    optimizer = tf.train.AdamOptimizer(learning_rate)
    model.default_build(optimizer=optimizer, learning_rate=learning_rate)
    return model
def mlp_00(th):
    """Build a multi-block MLP predictor from a hyper-parameter hub.

    Each block is a regularized Linear layer followed by an activation;
    a final scalar linear layer produces the output.

    :param th: an NlsHub holding memory_depth, mark, num_blocks,
               hidden_dim, regularizer, reg_strength, actype1 and
               learning_rate
    :return: a built NeuralNet model
    """
    assert isinstance(th, NlsHub)

    # Initiate a predictor
    model = NeuralNet(th.memory_depth, mark=th.mark, nn_class=Predictor)
    nn = model.nn
    assert isinstance(nn, Predictor)

    # Add layers (loop index is unused, so name it `_`)
    nn.add(Input([th.memory_depth]))
    for _ in range(th.num_blocks):
        nn.add(Linear(output_dim=th.hidden_dim,
                      weight_regularizer=th.regularizer,
                      strength=th.reg_strength))
        nn.add(Activation(th.actype1))
    nn.add(Linear(output_dim=1))

    # Build model
    model.default_build(th.learning_rate)

    # Return model
    return model
def bres_net_wid0(th, activation='relu'):
    """Build a width-wise branched residual net (BResNet).

    Each of `th.num_blocks` fork branches is a small MLP
    (Linear -> activation -> scalar Linear); branch outputs are merged
    by the fork container.

    Note: mutates `th.mark` by appending '-wid'.

    :param th: an NlsHub holding memory_depth, mark, num_blocks,
               hidden_dim and learning_rate
    :param activation: activation function name, default 'relu'
    :return: a built NeuralNet model wrapping a BResNet
    """
    assert isinstance(th, NlsHub)

    # Tag the mark and instantiate the branched residual net
    th.mark = '{}-{}'.format(th.mark, 'wid')
    model = NeuralNet(th.memory_depth, mark=th.mark, nn_class=BResNet)
    net = model.nn
    assert isinstance(net, BResNet)

    # Input, then switch the net to fork (parallel-branch) merging.
    # NOTE(review): sets the private attribute _inter_type directly —
    # presumably no public setter exists; confirm against the BResNet API.
    net.add(Input([th.memory_depth]))
    net._inter_type = pedia.fork

    # One small MLP per branch
    for _ in range(th.num_blocks):
        branch = net.add()
        branch.add(Linear(output_dim=th.hidden_dim))
        branch.add(Activation(activation))
        branch.add(Linear(output_dim=1))

    # Build and return
    model.default_build(th.learning_rate)
    return model
def bres_net_dep0(th, activation='relu'):
    """Build a depth-wise branched residual net (BResNet).

    Stacks `th.num_blocks` regularized Linear+activation blocks, each
    followed by a scalar-output branch tapped off the main trunk.

    Note: mutates `th.mark` by appending '-dep'.

    :param th: an NlsHub holding memory_depth, mark, num_blocks,
               hidden_dim, regularizer, reg_strength and learning_rate
    :param activation: activation function name, default 'relu'
    :return: a built NeuralNet model wrapping a BResNet
    """
    assert isinstance(th, NlsHub)

    # Tag the mark and instantiate the branched residual net
    th.mark = '{}-{}'.format(th.mark, 'dep')
    model = NeuralNet(th.memory_depth, mark=th.mark, nn_class=BResNet)
    net = model.nn
    assert isinstance(net, BResNet)

    # Trunk input
    net.add(Input([th.memory_depth]))

    # One trunk block plus one output branch per depth level
    for _ in range(th.num_blocks):
        net.add(Linear(output_dim=th.hidden_dim,
                       weight_regularizer=th.regularizer,
                       strength=th.reg_strength))
        net.add(Activation(activation))
        branch = net.add_branch()
        branch.add(Linear(output_dim=1))

    # Build and return
    model.default_build(th.learning_rate)
    return model
def net_00(memory_depth, learning_rate=0.001):
    """Build a fixed-configuration homogeneous-expansion predictor.

    Linear(10) feeding a sum-merged bank of Homogeneous layers of
    orders 1..4; the mark encodes both constants.

    :param memory_depth: memory depth of the predictor input
    :param learning_rate: learning rate used by default_build
    :return: a built NeuralNet model
    """
    # Fixed configuration baked into the mark
    hidden_dim = 10
    homo_order = 4
    mark = 'net_h{}_homo{}'.format(hidden_dim, homo_order)

    # Instantiate the predictor wrapper
    model = NeuralNet(memory_depth, mark=mark)
    net = model.nn
    assert isinstance(net, Predictor)

    # Stack the layers: linear front-end, then a sum-merged container
    # holding one homogeneous term per order
    net.add(Input([memory_depth]))
    net.add(Linear(output_dim=hidden_dim))
    net.add(inter_type=pedia.sum)
    for degree in range(1, homo_order + 1):
        net.add_to_last_net(Homogeneous(degree))

    # Build and return
    model.default_build(learning_rate)
    return model
def svn_00(memory, learning_rate=0.001):
    """Build a fixed-configuration stacked polynomial predictor.

    Three Linear(2*memory) + Polynomial(2) blocks, then a scalar linear
    output; the mark encodes the hidden dims and polynomial order.

    :param memory: memory depth of the predictor input
    :param learning_rate: learning rate used by default_build
    :return: a built NeuralNet model
    """
    # Fixed configuration baked into the mark
    D = memory
    hidden_dims = [2 * D] * 3
    p_order = 2
    mark = 'svn_{}_{}'.format(hidden_dims, p_order)

    # Instantiate the predictor wrapper
    model = NeuralNet(memory, mark=mark)
    net = model.nn
    assert isinstance(net, Predictor)

    # Stack the layers: one Linear+Polynomial pair per hidden dim
    net.add(Input([D]))
    for dim in hidden_dims:
        net.add(Linear(output_dim=dim))
        net.add(Polynomial(p_order))
    net.add(Linear(output_dim=1))

    # Build and return
    model.default_build(learning_rate)
    return model
def mlp02(mark, memory_depth, layer_num, hidden_dim, learning_rate,
          activation, identity_init=True):
    """Build a Bamboo MLP with per-layer output branches.

    Each of `layer_num` trunk blocks (Linear -> activation) is tapped
    by a scalar-output branch; a final Linear -> activation -> Linear
    produces the trunk output.

    :param mark: model mark (identifier)
    :param memory_depth: memory depth of the input
    :param layer_num: number of branched trunk blocks
    :param hidden_dim: hidden layer width
    :param learning_rate: learning rate used by default_build
    :param activation: activation function name
    :param identity_init: if True, pass identity_initial=True to NeuralNet
    :return: a built NeuralNet model wrapping a Bamboo net
    """
    # Instantiate the bamboo net; only pass identity_initial when requested
    kwargs = dict(mark=mark, bamboo=True)
    if identity_init:
        kwargs['identity_initial'] = True
    model = NeuralNet(memory_depth, **kwargs)
    net = model.nn
    assert isinstance(net, Bamboo)

    # Trunk input
    net.add(Input([memory_depth]))

    # One trunk block plus one scalar branch per layer
    for _ in range(layer_num):
        net.add(Linear(output_dim=hidden_dim))
        net.add(Activation(activation))
        branch = net.add_branch()
        branch.add(Linear(output_dim=1))

    # Trunk tail
    net.add(Linear(output_dim=hidden_dim))
    net.add(Activation(activation))
    net.add(Linear(output_dim=1))

    # Build and return
    model.default_build(learning_rate)
    return model