# Example 1
def regression_height(inputsize, num_covariates=2, l1_value=0.001,
                      mask_path='/home/ahilten/repositories/pheno_height/Input_files/SNP_nearest_gene_mask.npz'):
    """Build a height-regression network: a sparse SNP-to-gene layer plus covariates.

    Args:
        inputsize: number of SNP inputs (must match mask.shape[0] — TODO confirm).
        num_covariates: width of the auxiliary covariate input.
        l1_value: L1 regularization strength for the first dense layer.
        mask_path: path to the sparse SNP->gene connectivity mask (.npz).
            Parameterized so the function is not tied to one machine's
            absolute path; the default preserves the original behavior.

    Returns:
        (model, masks): the compiled-free Keras model and the list of masks used.
    """
    mask = scipy.sparse.load_npz(mask_path)
    masks = [mask]

    # Covariate input is concatenated into the network after the gene layer.
    input_cov = K.Input((num_covariates,), name='inputs_cov')

    inputs_ = K.Input((mask.shape[0],), name='inputs_')
    layer_0 = K.layers.Reshape(input_shape=(mask.shape[0],), target_shape=(inputsize, 1))(inputs_)

    # Sparse locally-directed layer: connects each SNP only to its nearest gene.
    layer_1 = LocallyDirected1D(mask=mask, filters=1, input_shape=(inputsize, 1), name="gene_layer")(layer_0)
    layer_1 = K.layers.Flatten()(layer_1)
    layer_1 = K.layers.Activation("relu")(layer_1)
    layer_1 = K.layers.BatchNormalization()(layer_1)

    layer_2 = K.layers.Dense(units=10, kernel_regularizer=tf.keras.regularizers.l1(l=l1_value))(layer_1)
    layer_2 = K.layers.Activation("relu")(layer_2)

    # Merge genetic features with covariates before the final dense stack.
    layer_3 = K.layers.concatenate([layer_2, input_cov], axis=1)
    layer_3 = K.layers.BatchNormalization()(layer_3)
    layer_3 = K.layers.Dense(units=10)(layer_3)
    layer_3 = K.layers.Activation("relu")(layer_3)

    layer_4 = K.layers.Dense(units=10)(layer_3)
    layer_4 = K.layers.Activation("relu")(layer_4)

    # Bias initialized at 168 — presumably mean height in cm; verify against training data.
    layer_5 = K.layers.Dense(units=1, bias_initializer=tf.keras.initializers.Constant(168))(layer_4)
    layer_5 = K.layers.Activation("relu")(layer_5)

    model = K.Model(inputs=[inputs_, input_cov], outputs=layer_5)

    print(model.summary())

    return model, masks
# Example 2
def create_network_from_npz(datapath, l1_value=0.01, regression=False):
    """Build a network from all sparse connectivity masks found in *datapath*.

    Masks are sorted largest-to-smallest so consecutive masks chain
    (output size of one equals input size of the next). Input size is read
    from the genotype HDF5 file in *datapath*.

    Args:
        datapath: directory containing 'genotype.h5' and one or more '*.npz' masks.
        l1_value: L1 regularization for the final dense layer (used only when
            the mask chain does not already end in a single output).
        regression: if True the output activation is linear, else sigmoid.

    Returns:
        (model, masks): the Keras model and the sorted list of masks.
    """
    print('ToDO: test')
    masks = []
    mask_shapes_x = []
    mask_shapes_y = []

    # Context manager guarantees the HDF5 handle is closed even on error.
    with tables.open_file(datapath + "genotype.h5", "r") as h5file:
        inputsize = h5file.root.data.shape[1]

    for npz_path in glob.glob(datapath + '/*.npz'):
        mask = scipy.sparse.load_npz(npz_path)
        masks.append(mask)
        mask_shapes_x.append(mask.shape[0])
        mask_shapes_y.append(mask.shape[1])

    # Sort all masks from the largest input dimension down; a single pass
    # suffices (the original looped len(masks) times and also indexed plain
    # Python lists with NumPy index arrays, which raises TypeError).
    argsort_x = np.argsort(mask_shapes_x)[::-1]
    argsort_y = np.argsort(mask_shapes_y)[::-1]
    assert all(argsort_x == argsort_y)  # both dimensions must have the same order

    masks = [masks[j] for j in argsort_y]  # sort masks
    mask_shapes_x = np.array(mask_shapes_x)[argsort_x]
    mask_shapes_y = np.array(mask_shapes_y)[argsort_y]

    for x in range(len(masks) - 1):  # check that consecutive masks fit each other
        assert mask_shapes_y[x] == mask_shapes_x[x + 1]

    assert mask_shapes_x[0] == inputsize
    # If the chain already ends at a single unit, the last mask is the output
    # layer; otherwise a dense layer produces the scalar output.
    all_masks_available = mask_shapes_y[-1] == 1

    input_layer = K.Input((inputsize,), name='input_layer')
    model = K.layers.Reshape(input_shape=(inputsize,), target_shape=(inputsize, 1))(input_layer)

    for i in range(len(masks) - 1):
        mask = masks[i]
        model = layer_block(model, mask, i)

    model = K.layers.Flatten()(model)

    if all_masks_available:
        output_layer = LocallyDirected1D(mask=masks[-1], filters=1, input_shape=(mask.shape[0], 1),
                                         name="output_layer")(model)
    else:
        output_layer = K.layers.Dense(units=1, name="output_layer",
                                      kernel_regularizer=tf.keras.regularizers.l1(l=l1_value))(model)
    if regression:
        output_layer = K.layers.Activation("linear")(output_layer)
    else:
        output_layer = K.layers.Activation("sigmoid")(output_layer)

    model = K.Model(inputs=input_layer, outputs=output_layer)

    print(model.summary())

    return model, masks
# Example 3
 def layer_block(model, mask, i):
     """Append one sparse block (LocallyDirected1D + tanh + batch norm) to *model*."""
     block = LocallyDirected1D(mask=mask,
                               filters=1,
                               input_shape=(mask.shape[0], 1),
                               name="LocallyDirected_" + str(i))(model)
     block = K.layers.Activation("tanh")(block)
     # center/scale disabled: normalization only, no learned affine transform.
     return K.layers.BatchNormalization(center=False, scale=False)(block)
# Example 4
def layer_block(model, mask, i, regression):
    """Append one sparse block to *model*: LocallyDirected1D, activation, batch norm.

    The activation is relu for regression networks and tanh for classification.
    """
    activation_type = "relu" if regression else "tanh"

    out = LocallyDirected1D(mask=mask, filters=1, input_shape=(mask.shape[0], 1),
                            name="LocallyDirected_" + str(i))(model)
    out = K.layers.Activation(activation_type)(out)
    # Normalization only — no learned shift/scale.
    out = K.layers.BatchNormalization(center=False, scale=False)(out)
    return out
# Example 5
def example_network(inputsize):
    """Build a minimal example network: one SNP->gene sparse layer, then a sigmoid output.

    Loads the connectivity mask from './folder/snps_gene.npz'.
    """
    mask = scipy.sparse.load_npz('./folder/snps_gene.npz')

    inputs_ = K.Input((inputsize,), name='inputs_')
    reshaped = K.layers.Reshape(input_shape=(inputsize,), target_shape=(inputsize, 1))(inputs_)

    # Sparse gene layer followed by tanh and affine-free batch norm.
    gene = LocallyDirected1D(mask=mask, filters=1, input_shape=(inputsize, 1), name="gene_layer")(reshaped)
    gene = K.layers.Flatten()(gene)
    gene = K.layers.Activation("tanh")(gene)
    gene = K.layers.BatchNormalization(center=False, scale=False, name="inter_out")(gene)

    output = K.layers.Activation("sigmoid")(K.layers.Dense(units=1)(gene))
    return K.Model(inputs=inputs_, outputs=output)
# Example 6
def example_network(num_covariates=2):
    """Build an example network with a sparse gene layer and a covariate input.

    Fixes the original, which referenced the undefined globals `num_covariates`
    and `inputsize` (NameError at call time): the input size is now derived
    from the mask itself, and the covariate count is a defaulted parameter.

    Args:
        num_covariates: width of the covariate input tensor.

    Returns:
        (model, masks): the Keras model and the list containing the single mask.
    """
    mask = scipy.sparse.load_npz('./folder/snps_gene.npz')
    masks = [mask]
    inputsize = mask.shape[0]  # was an undefined global in the original

    inputs_ = K.Input((mask.shape[0],), name='inputs_')
    # NOTE(review): input_cov is declared as a model input but never connected
    # to the graph — confirm whether it should be concatenated before the
    # final dense layer.
    input_cov = K.Input((num_covariates,), name='inputs_cov')

    layer_0 = K.layers.Reshape(input_shape=(mask.shape[0],), target_shape=(inputsize, 1))(inputs_)

    layer_1 = LocallyDirected1D(mask=mask, filters=1, input_shape=(inputsize, 1), name="gene_layer")(layer_0)
    layer_1 = K.layers.Flatten()(layer_1)
    layer_1 = K.layers.Activation("relu")(layer_1)
    layer_1 = K.layers.BatchNormalization(center=False, scale=False, name="inter_out")(layer_1)

    layer_2 = K.layers.Dense(units=1)(layer_1)
    layer_2 = K.layers.Activation("relu")(layer_2)
    model = K.Model(inputs=[inputs_, input_cov], outputs=layer_2)
    print(model.summary())

    return model, masks
# Example 7
def create_network_from_npz(datapath,
                            inputsize,
                            genotype_path,
                            l1_value=0.01,
                            regression=False,
                            num_covariates=0,
                            mask_order=None):
    """Build a network from sparse .npz masks, with optional covariates.

    Masks are either loaded in the explicit `mask_order`, or discovered via
    glob and sorted largest-to-smallest so consecutive masks chain.

    Args:
        datapath: directory containing the '*.npz' mask files.
        inputsize: expected input dimension; must equal the first mask's rows.
        genotype_path: NOTE(review) — unused in this function; confirm whether
            it is kept only for interface compatibility with callers.
        l1_value: L1 regularization for the final dense layer.
        regression: regression vs. classification; also selects block activations.
        num_covariates: width of the auxiliary covariate input.
        mask_order: optional list of mask file stems giving the layer order.
            Defaults to None (was a mutable `[]` default — a shared-state bug risk).

    Returns:
        (model, masks): the Keras model and the ordered list of masks.
    """
    print("Creating networks from npz masks")
    print("regression", regression)
    if regression:
        mean_ytrain, negative_values_ytrain = regression_properties(datapath)
    else:
        mean_ytrain = 0
        negative_values_ytrain = False

    mask_order = [] if mask_order is None else mask_order
    masks = []
    mask_shapes_x = []
    mask_shapes_y = []

    print(mask_order)

    if len(mask_order) > 0:  # if mask_order is defined we use this order
        for mask_name in mask_order:
            mask = scipy.sparse.load_npz(datapath + '/' + str(mask_name) + '.npz')
            masks.append(mask)
            mask_shapes_x.append(mask.shape[0])
            mask_shapes_y.append(mask.shape[1])

        for x in range(len(masks) - 1):  # check that consecutive masks fit each other
            assert mask_shapes_y[x] == mask_shapes_x[x + 1]
    else:
        # If mask order is not defined, sort the masks by size.
        for npz_path in glob.glob(datapath + '/*.npz'):
            mask = scipy.sparse.load_npz(npz_path)
            masks.append(mask)
            mask_shapes_x.append(mask.shape[0])
            mask_shapes_y.append(mask.shape[1])

        # One sorting pass suffices (the original looped len(masks) times,
        # re-converting to np.array each pass and shadowing its loop index
        # inside the list comprehension).
        argsort_x = np.argsort(mask_shapes_x)[::-1]
        argsort_y = np.argsort(mask_shapes_y)[::-1]
        assert all(argsort_x == argsort_y)  # both dimensions must have the same order

        masks = [masks[j] for j in argsort_y]  # sort masks largest-first
        mask_shapes_x = np.array(mask_shapes_x)[argsort_x]
        mask_shapes_y = np.array(mask_shapes_y)[argsort_y]

        for x in range(len(masks) - 1):  # check that consecutive masks fit each other
            assert mask_shapes_y[x] == mask_shapes_x[x + 1]

    assert mask_shapes_x[0] == inputsize
    # If the chain ends at a single unit the last mask acts as the output layer.
    if mask_shapes_y[-1] == 1:
        all_masks_available = True
    else:
        all_masks_available = False

    input_layer = K.Input((inputsize,), name='input_layer')
    input_cov = K.Input((num_covariates,), name='inputs_cov')

    model = K.layers.Reshape(input_shape=(inputsize,), target_shape=(inputsize, 1))(input_layer)

    # NOTE(review): this loop applies every mask, yet when all_masks_available
    # masks[-1] is applied AGAIN below as the output layer — the sibling
    # implementation loops over len(masks) - 1 instead. Confirm which is intended.
    for i in range(len(masks)):
        mask = masks[i]
        model = layer_block(model, mask, i, regression)

    model = K.layers.Flatten()(model)

    if all_masks_available:
        model = LocallyDirected1D(mask=masks[-1], filters=1, input_shape=(mask.shape[0], 1),
                                  name="output_layer")(model)
    else:
        model = K.layers.Dense(units=1, name="output_layer",
                               kernel_regularizer=tf.keras.regularizers.l1(l=l1_value)
                               )(model)

    # Covariates are merged in after the genetic layers.
    model = add_covariates(model, input_cov, num_covariates, regression, negative_values_ytrain, mean_ytrain)

    output_layer = activation_layer(model, regression, negative_values_ytrain)
    model = K.Model(inputs=[input_layer, input_cov], outputs=output_layer)

    print(model.summary())

    return model, masks