Example #1
def __init__(self, *args):
    # Accept layer sizes as varargs; it2list normalizes them to a list.
    sizes = it2list(args)
    it = iter(sizes)
    # The first size becomes the input layer; each remaining size appends
    # a sigmoid MLP layer chained onto the previous one.
    self.start = layers.Input(next(it))
    layer = self.start
    for size in it:
        layer.append(layers.MLP(size, activations.sigmoid))
        layer = layer.next_
    self.end = layer
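A hypothetical instantiation, assuming this __init__ belongs to a sequential network class (the class name Network here is an assumption):

    # Hypothetical usage: an input layer of size 784, then sigmoid MLP
    # layers of sizes 128 and 10, linked through .next_.
    net = Network(784, 128, 10)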
Example #2
def __init__(self, input_size, loss='mean_squared_error'):
    """
    Parameters
    ----------
    input_size : int
        Size of the input array
    loss : str
        Name of a loss function defined in the F module
    """
    # Resolve the loss function by name from the F module.
    self.loss = getattr(F, loss)
    self.layers = [L.Input(input_size)]
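A hypothetical instantiation; the name Network is an assumption, while the F module and the getattr lookup come from the snippet itself:

    # Hypothetical usage: 'mean_squared_error' is resolved to
    # F.mean_squared_error via getattr, so any loss defined in F
    # can be selected by its string name.
    net = Network(input_size=4, loss='mean_squared_error')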
Example #3
File: main.py Project: DanBlanaru/MiniNN
def run_classification():
    model = Model.Model()
    model.add_layer(layers.Input(784))
    model.add_layer(layers.Dense(100, activation=af.relu))
    model.add_layer(layers.Dense(10, activation=af.softmax))
    model.compile(losses.crossentropy, optimizers.Adam())

    # mnist.pkl.gz stores an (images, labels) tuple for each split.
    with gzip.open('data/mnist.pkl.gz', 'rb') as f:
        train_set, validation_set, test_set = pickle.load(f, encoding='latin1')
    n_train = train_set[0].shape[0]
    n_test = test_set[0].shape[0]

    train_set_onehots = helpers.make_onehot_2d(train_set[1], 10)
    test_set_onehots = helpers.make_onehot_2d(test_set[1], 10)
    model.fit(train_set[0], train_set_onehots, 50, 50, metric_dataset_x=test_set[0], metric_dataset_y=test_set_onehots,
              metric_callback=classification_metric_accuracy)
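helpers.make_onehot_2d is not shown above; a minimal NumPy sketch of what a helper with this name plausibly does (the project's actual implementation may differ):

    import numpy as np

    def make_onehot_2d(labels, num_classes):
        # Map integer labels of shape (N,) to a one-hot matrix (N, num_classes).
        onehot = np.zeros((len(labels), num_classes))
        onehot[np.arange(len(labels)), labels] = 1.0
        return onehot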
Example #4
File: main.py Project: DanBlanaru/MiniNN
def run_regression():
    df = np.array(pd.read_csv('data/Dataset/Training/Features_Variant_1.csv'))
    model = Model.Model()
    model.add_layer(layers.Input(53))
    model.add_layer(layers.Dense(20, activation=af.relu))
    model.add_layer(layers.Dense(1, activation=af.sigmoid))
    model.compile(losses.mse, optimizers.SGD())

    # The last CSV column is the regression target; the rest are features.
    input_set = np.array([x[:-1] for x in df])
    output_set = np.array([x[-1] for x in df]).reshape(len(input_set), 1)

    # Model.save_model(model, "test")
    # tmp = Model.load_model("test")
    # tmp.fit(input_set, output_set, 50, 50, metric_callback=regression_metric_mse)
    # Standardize both features and targets before training.
    input_set = helpers.standard_scaler(input_set)
    output_set = helpers.standard_scaler(output_set)

    # Turn floating-point warnings (overflow, invalid ops) into exceptions.
    np.seterr(all="raise")
    model.fit(input_set, output_set, 50, 100, metric_callback=regression_metric_mse)
    Model.save_model(model, "SGD")
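helpers.standard_scaler is likewise not shown; a minimal sketch under the usual definition of standardization (the actual helper may differ):

    import numpy as np

    def standard_scaler(x):
        # Rescale each column to zero mean and unit variance.
        return (x - x.mean(axis=0)) / x.std(axis=0)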
Example #5
def compose_layers(num_classes, layer_dims, shape_input):
    embed_layer = layers.SelfInteractionSimple(layer_dims[0])
    input_layer = layers.Input()
    
    model_layers = []
    # One block per hidden dimension: convolution, concatenation,
    # self-interaction, then a nonlinearity.
    for dim in layer_dims[1:]:
        model_layers.append(layers.Convolution())
        model_layers.append(layers.Concatenation())
        model_layers.append(layers.SelfInteraction(dim))
        model_layers.append(layers.Nonlinearity())
    output_layer = layers.Output(num_classes)

    x, rbf, rij = input_layer(shape_input)
    # Feature tensors live in a dict keyed by order; slot 0 starts with the embedding.
    input_tensor_list = {0: [embed_layer(x)]}

    for layer in model_layers:
        # Convolution layers also need the radial basis (rbf) and
        # relative-position (rij) tensors.
        if isinstance(layer, layers.Convolution):
            input_tensor_list = layer([input_tensor_list, rbf, rij])
        else:
            input_tensor_list = layer(input_tensor_list)
    output = output_layer(input_tensor_list)
    return output
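A hypothetical call; shape_input is whatever this project's Input layer expects, since input_layer(shape_input) must yield the point features x together with the rbf and rij geometry tensors (all dimensions below are assumptions):

    # Hypothetical usage: a 16-dim embedding, two hidden blocks, four classes.
    output = compose_layers(num_classes=4, layer_dims=[16, 16, 16],
                            shape_input=shape_input)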
Example #6
def my_conv_net(n_classes):

    # initialization
    training_name = 'mode'
    loss_name = 'loss'
    ground_truth = 'y'

    input_layer = layers.Input()
    inp = layers.L2Norm(0.01, loss_name, input_layer)

    # Convolution layers
    conv_count = 3
    res_shape = [(32, 32), (16, 16), (8, 8)]
    conv_filters = [(32, 32), (128, 128), (256, 512)]
    conv_shapes = [((3, 3, 3), (3, 3)), ((32, 3, 3), (3, 3)),
                   ((128, 3, 3), (3, 3))]
    for i in range(0, conv_count):
        inp = conv_bn_conv_bn_pool2x2(inp, conv_filters[i], conv_shapes[i],
                                      res_shape[i], training_name)

    # After three 2x2 pools the 32x32 input is 4x4 spatially, with 512 channels.
    flat = 4 * 4 * 512
    inp = layers.Reshape((flat, ), inp)

    # Fully-connected layers
    fc_count = 2
    fc_sizes = [(flat, 2048), (2048, 256)]
    for i in range(0, fc_count):
        inp = fc_bn_dropout(inp, fc_sizes[i], training_name)

    # Last fc layer
    y = layers.Affine((fc_sizes[-1][-1], n_classes), inp)

    loss = layers.SoftmaxLoss(ground_truth, loss_name, y)

    model = net.NeuralNetwork(input_layer, loss, loss_name, ground_truth,
                              training_name, layers.params, layers.grads)

    return model
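The conv_bn_conv_bn_pool2x2 and fc_bn_dropout helpers are not shown. A minimal sketch of what the convolutional helper plausibly does, assuming Conv follows the (shape, config, parent) convention of layers.Conv in the script below; the BatchNorm and MaxPool layer names and signatures are assumptions:

    def conv_bn_conv_bn_pool2x2(inp, filters, shapes, res_shape, training_name):
        # Hypothetical reconstruction, not the project's actual helper:
        # two Conv+BatchNorm pairs followed by a 2x2 max-pool.
        f1, f2 = filters              # output filters of the two convs
        shape1, kernel2 = shapes      # (in_ch, kh, kw) and (kh, kw)
        inp = layers.Conv((f1,) + shape1, {'stride': 1, 'pad': 1}, inp)
        inp = layers.BatchNorm(training_name, inp)        # assumed layer
        inp = layers.Conv((f2, f1) + kernel2, {'stride': 1, 'pad': 1}, inp)
        inp = layers.BatchNorm(training_name, inp)        # assumed layer
        inp = layers.MaxPool((2, 2), {'stride': 2}, inp)  # assumed layer
        # res_shape (the block's input resolution) is unused in this sketch;
        # the real helper may pass it to batch-norm or pooling bookkeeping.
        return inp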
Example #7
def autoencoder(bioma_shape=717,
                domain_shape=36,
                output_shape=717,
                latent_space=10,
                bioma_layers=[128, 64],
                domain_layers=[32, 16],
                input_transform=CenterLogRatio(),
                output_transform=None,
                activation_function_encoder=layers.ReLU(),
                activation_function_decoder=layers.ReLU(),
                activation_function_latent='tanh',
                ):
    has_domain = domain_shape is not None
    has_bioma = bioma_shape is not None

    if not has_bioma and not has_domain:
        raise Exception('Either bioma or domain has to be specified.')

    # encoder bioma

    if has_bioma:
        in_bioma = layers.Input(shape=(bioma_shape,), name='bioma_input_{}'.format(bioma_shape))
        net = in_bioma
        if input_transform is not None:
            net = input_transform(net)
        for s in bioma_layers:
            net = layers.Dense(s, activation=activation_function_encoder,
                               name="encoder_bioma_dense_{}".format(s))(net)
        encoded_bioma = layers.Dense(latent_space, activation=activation_function_latent,
                                     name='encoded_bioma_{}'.format(latent_space))(net)
        encoder_bioma = keras.Model(inputs=in_bioma, outputs=encoded_bioma, name='EncoderBioma')
    else:
        encoded_bioma = None
        encoder_bioma = None

    # encoder domain

    if has_domain:
        in_domain = layers.Input(shape=(domain_shape,), name='domain_input_{}'.format(domain_shape))
        net = in_domain
        for s in domain_layers:
            net = layers.Dense(s, activation=activation_function_encoder,
                               name="encoder_domain_dense_{}".format(s))(net)
        encoded_domain = layers.Dense(latent_space, activation=activation_function_latent,
                                      name='encoded_domain_{}'.format(latent_space))(net)
        encoder_domain = keras.Model(inputs=in_domain, outputs=encoded_domain, name='EncoderDomain')
    else:
        encoded_domain = None
        encoder_domain = None

    # decoder bioma for both autoencoders

    in_latent_space = layers.Input(shape=(latent_space,), name='latent_space_input')
    net = in_latent_space
    net_bioma = encoded_bioma
    net_domain = encoded_domain
    for s in reversed(bioma_layers):
        layer = layers.Dense(s, activation=activation_function_decoder,
                             name="decoder_dense_{}".format(s))
        net = layer(net)
        if has_bioma:
            net_bioma = layer(net_bioma)
        if has_domain:
            net_domain = layer(net_domain)

    layer = layers.Dense(output_shape, activation=None, name='decoded_bioma')
    decoded_bioma = layer(net)
    if has_bioma:
        net_bioma = layer(net_bioma)
    if has_domain:
        net_domain = layer(net_domain)

    if output_transform is not None:
        decoded_bioma = output_transform(decoded_bioma)
        if has_bioma:
            net_bioma = output_transform(net_bioma)
        if has_domain:
            net_domain = output_transform(net_domain)

    decoder_bioma = keras.Model(inputs=in_latent_space, outputs=decoded_bioma, name='DecoderBioma')

    # combined model for training

    if has_domain and has_bioma:
        diff_encoders = tf.math.abs(encoded_domain - encoded_bioma, name='diff_encoded')
        diff_encoders = Identity(name='latent')(diff_encoders)
        net_bioma = Identity(name='bioma')(net_bioma)
        net_domain = Identity(name='domain')(net_domain)

        model = keras.Model(inputs=[in_bioma, in_domain],
                            outputs=[net_bioma, net_domain, diff_encoders],
                            name='model')
    else:
        if has_bioma:
            net_bioma = Identity(name='bioma')(net_bioma)
            model = keras.Model(inputs=[in_bioma],
                                outputs=[net_bioma],
                                name='model')
        if has_domain:
            net_domain = Identity(name='domain')(net_domain)
            model = keras.Model(inputs=[in_domain],
                                outputs=[net_domain],
                                name='model')

    return model, encoder_bioma, encoder_domain, decoder_bioma
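A hypothetical training setup for the combined model: the output names 'bioma', 'domain', and 'latent' come from the Identity layers above, so Keras can take per-output losses keyed by those names (the optimizer and loss choices here are assumptions):

    # Build with defaults, then compile with one loss per named output.
    model, encoder_bioma, encoder_domain, decoder_bioma = autoencoder()
    model.compile(optimizer='adam',
                  loss={'bioma': 'mse', 'domain': 'mse', 'latent': 'mse'})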
import numpy as np

from net import *
from cs231n.solver import *
import layers

if __name__ == "__main__":
    # Instantiation example
    i1 = layers.Input()
    c1 = layers.Conv((8, 3, 3, 3), {'stride': 1, 'pad': 1}, i1)
    flat = 8 * 28 * 28
    s1 = layers.Reshape((flat, ), c1)
    a1 = layers.Affine((flat, 10), s1)
    l1 = layers.SoftmaxLoss('y', 'loss', a1)
    try:
        # Restore previously saved parameters if a checkpoint file exists.
        layers.load_network('network')
    except IOError:
        pass

    model = NeuralNetwork(i1, l1, 'loss', layers.params, layers.grads)

    data = {
        'X_train': np.ones((2**10, 3, 28, 28)) * 0.1,
        'y_train': np.ones(2**10, dtype=int) * 2,   # np.int is removed in modern NumPy
        'X_val': np.ones((2**3, 3, 28, 28)) * 0.1,
        'y_val': np.ones(2**3, dtype=int) * 2
    }
    solver = Solver(model,
                    data,
                    update_rule='sgd',
                    optim_config={
                        'learning_rate': 1e-3,
                    },