Example 1
from keras.layers import Activation, Conv3D, Dense, GlobalAvgPool3D, Input
from keras.models import Model

# identity_resnet_A, Reduction_A and l2_penalty are project-specific helpers
# defined elsewhere in this repository; `deeper` is accepted but unused here.
def get_model(dhw=(48, 48, 48), weights_decay=0., kernel_initializer='he_uniform',
              weights=None, deeper=False, activation=lambda: Activation('relu')):
    shape = list(dhw) + [1]  # single-channel 3-D volume

    inputs = Input(shape=shape)
    conv1 = Conv3D(32, kernel_size=(3, 3, 3), padding='same', use_bias=False,
                   kernel_initializer=kernel_initializer,
                   kernel_regularizer=l2_penalty(weights_decay))(inputs)

    # Alternate identity ResNet blocks with reduction (downsampling) blocks,
    # doubling the filter count at each reduction.
    id1 = identity_resnet_A(conv1, filters=32, weights_decay=weights_decay,
                            kernel_initializer=kernel_initializer,
                            use_bias=False, bottleneck=2, activation=activation)
    down1 = Reduction_A(id1, filters=64, weights_decay=weights_decay,
                        kernel_initializer=kernel_initializer,
                        use_bias=False, bottleneck=2, activation=activation)

    id2 = identity_resnet_A(down1, filters=64, weights_decay=weights_decay,
                            kernel_initializer=kernel_initializer,
                            use_bias=False, bottleneck=2, activation=activation)
    down2 = Reduction_A(id2, filters=128, weights_decay=weights_decay,
                        kernel_initializer=kernel_initializer,
                        use_bias=False, bottleneck=2, activation=activation)

    id3 = identity_resnet_A(down2, filters=128, weights_decay=weights_decay,
                            kernel_initializer=kernel_initializer,
                            use_bias=False, bottleneck=2, activation=activation)
    down3 = Reduction_A(id3, filters=256, weights_decay=weights_decay,
                        kernel_initializer=kernel_initializer,
                        use_bias=False, bottleneck=2, activation=activation)

    # Global average pooling feeds a single sigmoid unit for binary output.
    pool = GlobalAvgPool3D()(down3)
    outputs = Dense(1, kernel_regularizer=l2_penalty(weights_decay),
                    kernel_initializer=kernel_initializer,
                    activation='sigmoid')(pool)
    model = Model(inputs, outputs)
    if weights is not None:
        model.load_weights(weights, by_name=True)
    return model
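A minimal usage sketch, assuming the project helpers referenced above (identity_resnet_A, Reduction_A, l2_penalty) are defined in the same module:

model = get_model(dhw=(48, 48, 48), weights_decay=1e-4)
model.compile(optimizer='adam', loss='binary_crossentropy',
              metrics=['accuracy'])
model.summary()  # (48, 48, 48, 1) volumes in, one sigmoid probability out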
Example 2
from keras.layers import (AveragePooling3D, BatchNormalization, Conv3D,
                          GlobalAvgPool3D)

# l2_penalty is a project-specific regularizer helper defined elsewhere.
def transmit_block(x, compression, activation, bn_scale, kernel_initializer,
                   weights_decay):
    # Pre-activation: BN followed by the configured nonlinearity.
    x = BatchNormalization(scale=bn_scale, axis=-1)(x)
    x = activation()(x)
    if (compression is not None) and (compression > 1):
        # Transition block: compress the channel count with a 1x1x1
        # convolution, then halve each spatial dimension.
        *_, f = x.get_shape().as_list()
        x = Conv3D(f // compression,
                   kernel_size=(1, 1, 1),
                   padding='same',
                   use_bias=True,
                   kernel_initializer=kernel_initializer,
                   kernel_regularizer=l2_penalty(weights_decay))(x)
        x = AveragePooling3D((2, 2, 2), padding='valid')(x)
    else:
        # Final block: collapse the remaining volume with global pooling.
        x = GlobalAvgPool3D()(x)
    return x
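A quick sketch of how this block might be wired into a 3-D feature map (the activation factory and weights_decay values here are illustrative):

from keras.layers import Activation, Input
from keras.models import Model

inp = Input(shape=(24, 24, 24, 64))
# Compression 2 halves the channels and the spatial dims: (12, 12, 12, 32).
out = transmit_block(inp, compression=2,
                     activation=lambda: Activation('relu'),
                     bn_scale=True, kernel_initializer='he_uniform',
                     weights_decay=1e-4)
print(Model(inp, out).output_shape)  # (None, 12, 12, 12, 32)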
Example 3
def _transmit_block(x, is_last):
    # PARAMS is a module-level configuration dict; l2_penalty is a
    # project-specific regularizer helper (both defined elsewhere).
    bn_scale = PARAMS['bn_scale']
    activation = PARAMS['activation']
    kernel_initializer = PARAMS['kernel_initializer']
    weight_decay = PARAMS['weight_decay']
    compression = PARAMS['compression']

    # Pre-activation: BN followed by the configured nonlinearity.
    x = BatchNormalization(scale=bn_scale, axis=-1)(x)
    x = activation()(x)
    if is_last:
        # Final block: collapse the remaining volume with global pooling.
        x = GlobalAvgPool3D()(x)
    else:
        # Transition block: compress channels, then halve spatial dims.
        *_, f = x.get_shape().as_list()
        x = Conv3D(f // compression, kernel_size=(1, 1, 1), padding='same', use_bias=True,
                   kernel_initializer=kernel_initializer,
                   kernel_regularizer=l2_penalty(weight_decay))(x)
        x = AveragePooling3D((2, 2, 2), padding='valid')(x)
    return x
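The block reads its hyperparameters from a module-level PARAMS dict. A minimal sketch of what that configuration might look like (illustrative values, not the repository's actual defaults):

from keras.layers import Activation

PARAMS = {
    'bn_scale': True,
    'activation': lambda: Activation('relu'),
    'kernel_initializer': 'he_uniform',
    'weight_decay': 1e-4,
    'compression': 2,
}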
Example 4
from keras.layers import (Activation, BatchNormalization, Conv3D,
                          Conv3DTranspose, Dense, GlobalAvgPool3D, LeakyReLU,
                          Multiply, Reshape)

def conv_layer(inp,
               f,
               k=4,
               s=2,
               p='same',
               act='relu',
               bn=True,
               transpose=False,
               se=False,
               se_ratio=16):
    # Pick the initializer to match the activation family.
    initializer = act if act is not None else ''
    initializer = 'he_uniform' if 'relu' in initializer else 'glorot_uniform'
    fun = Conv3DTranspose if transpose else Conv3D
    out = fun(f,
              k,
              strides=s,
              padding=p,
              use_bias=False,
              kernel_initializer=initializer)(inp)
    if bn:
        out = BatchNormalization()(out)

    if act == 'lrelu':
        out = LeakyReLU(alpha=0.2)(out)
    elif act is not None:
        out = Activation(act)(out)

    # Squeeze-and-excite: global context -> bottleneck -> per-channel gates.
    if se:
        out_se = GlobalAvgPool3D()(out)
        r = max(f // se_ratio, 1)  # never squeeze below one unit
        # Reshape to (1, 1, 1, f) so the gates broadcast over the 5-D
        # (batch, d, h, w, channels) feature map; the original (1, 1, f)
        # would not broadcast correctly for 3-D convolutions.
        out_se = Reshape((1, 1, 1, f))(out_se)
        out_se = Dense(r,
                       use_bias=False,
                       kernel_initializer='he_uniform',
                       activation='relu')(out_se)
        out_se = Dense(f, use_bias=False, activation='sigmoid')(out_se)
        out = Multiply()([out, out_se])

    return out
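A usage sketch chaining a few of these layers into a small encoder/decoder step (shapes assume 'same' padding and the default stride of 2):

from keras.layers import Input
from keras.models import Model

inp = Input(shape=(32, 32, 32, 1))
x = conv_layer(inp, f=32)                # -> (16, 16, 16, 32)
x = conv_layer(x, f=64, se=True)         # -> (8, 8, 8, 64), channel-gated
x = conv_layer(x, f=32, transpose=True)  # -> (16, 16, 16, 32), upsampled
print(Model(inp, x).output_shape)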
Example 5

# The top of this snippet is cut off in the original page: convModel,
# sczInput, reg1, reg2, adam, DROPOUT, inputDim, outputDim and numClasses
# are defined in the missing lines.
convModel = BatchNormalization(axis=-1, center=True, scale=False)(convModel)
convModel = AveragePooling2D(pool_size=2, strides=2)(convModel)
# convModel = GlobalAvgPool2D()(convModel)
convModel = Flatten()(convModel)
scz_sm = Dense(units=512, activation='relu',
               kernel_regularizer=reg1)(convModel)
scz_sm = Dense(units=512, activation='relu', kernel_regularizer=reg1)(scz_sm)
scz_sm = Dropout(rate=DROPOUT)(scz_sm)
scz_sm = Dense(units=64, activation='relu')(scz_sm)

# Second, global pooling that uses the same snp emb data as input
snp_emb = Embedding(input_dim=inputDim,
                    output_dim=outputDim,
                    embeddings_regularizer=reg2,
                    name='SNP_input')(sczInput)
snp_gap = GlobalAvgPool3D()(snp_emb)
snp_gap = Dense(units=64, activation='relu')(snp_gap)

# combine snp conv 2D with snp embedding
concat = concatenate([scz_sm, snp_gap])

dropout = Dropout(rate=DROPOUT)(concat)
combined_output = Dense(units=numClasses, activation='softmax')(dropout)
classifier = Model(inputs=sczInput, outputs=combined_output)
# summarize layers
print("Model summary: \n", classifier.summary())

# compile the model
classifier.compile(
    optimizer=adam,
    # loss=[focal_loss],
)  # the remaining compile arguments are cut off in the original snippet

Example 6

    # The top of this snippet is also cut off: the opening of the Conv2D
    # call below is lost, so `nFilters` and `kSize` are hypothetical
    # placeholders for the missing filter-count and kernel-size arguments.
    exp_conv = Conv2D(nFilters, kSize,
                      kernel_regularizer=reg2,
                      activation='relu',
                      dilation_rate=(2, 2),
                      padding='same')(exp_conv)
    exp_conv = BatchNormalization(axis=-1, center=True, scale=False)(exp_conv)
    exp_conv = AveragePooling2D(pool_size=1, strides=1)(exp_conv)
    exp_conv = Flatten()(exp_conv)
    exp_conv = Dense(units=256, activation='relu')(exp_conv)
    exp_conv = Dense(units=64, activation='relu')(exp_conv)

    # Second, exp model which uses the same exp data as input
    exp_emb = Embedding(input_dim=inputDim,
                        output_dim=outputDim,
                        embeddings_regularizer=reg2,
                        name='EXP_input2')(exp_input)
    exp_gap = GlobalAvgPool3D()(exp_emb)
    exp_gap = Dense(units=64, activation='relu')(exp_gap)

    # combine the SNP layers
    combined1 = add([exp_conv, exp_gap])
    combined2 = multiply([exp_conv, exp_gap])
    combined = concatenate([combined1, combined2])

    combined = Dense(units=64, activation='relu')(combined)
    combined = Dropout(rate=DROPOUT)(combined)
    combined_output = Dense(units=numClasses, activation='softmax')(combined)
    classifier = Model(inputs=exp_input, outputs=combined_output)
    # summarize layers
    print("Model summary: \n", classifier.summary())

    # compile the model