Example #1
def build_model(input_layer = None):

    #################
    # Regular model #
    #################
    input_size = data_sizes["sliced:data:singleslice"]

    if input_layer:
        l0 = input_layer
    else:
        l0 = nn.layers.InputLayer(input_size)

    l1 = jonas_highway(l0, num_filters=64, num_conv=2, filter_size=(3,3), pool_size=(2,2))
    l2 = jonas_highway(l1, num_filters=128, num_conv=2, filter_size=(3,3), pool_size=(2,2))
    l3 = jonas_highway(l2, num_filters=256, num_conv=3, filter_size=(3,3), pool_size=(2,2))
    l4 = jonas_highway(l3, num_filters=512, num_conv=3, filter_size=(3,3), pool_size=(2,2))
    l5 = jonas_highway(l4, num_filters=512, num_conv=3, filter_size=(3,3), pool_size=(2,2))

    # Systole Dense layers
    ldsys1 = nn.layers.DenseLayer(l5, num_units=512, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)

    ldsys1drop = nn.layers.dropout(ldsys1, p=0.5)
    ldsys2 = nn.layers.DenseLayer(ldsys1drop, num_units=512, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)

    ldsys2drop = nn.layers.dropout(ldsys2, p=0.5)
    ldsys3 = nn.layers.DenseLayer(ldsys2drop, num_units=600, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.softmax)

    ldsys3drop = nn.layers.dropout(ldsys3, p=0.5)  # dropout at the output might encourage adjacent neurons to correlate
    ldsys3dropnorm = layers.NormalisationLayer(ldsys3drop)
    l_systole = layers.CumSumLayer(ldsys3dropnorm)

    # Diastole Dense layers
    lddia1 = nn.layers.DenseLayer(l5, num_units=512, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)

    lddia1drop = nn.layers.dropout(lddia1, p=0.5)
    lddia2 = nn.layers.DenseLayer(lddia1drop, num_units=512, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)

    lddia2drop = nn.layers.dropout(lddia2, p=0.5)
    lddia3 = nn.layers.DenseLayer(lddia2drop, num_units=600, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.softmax)

    lddia3drop = nn.layers.dropout(lddia3, p=0.5)  # dropout at the output might encourage adjacent neurons to correlate
    lddia3dropnorm = layers.NormalisationLayer(lddia3drop)
    l_diastole = layers.CumSumLayer(lddia3dropnorm)


    return {
        "inputs":{
            "sliced:data:singleslice": l0
        },
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        },
        "regularizable": {
            ldsys1: l2_weight,
            ldsys2: l2_weight,
            ldsys3: l2_weight_out,
            lddia1: l2_weight,
            lddia2: l2_weight,
            lddia3: l2_weight_out,
        },
        "meta_outputs": {
            "systole": ldsys2,
            "diastole": lddia2,
        }
    }
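
These examples use a few project-specific helpers that are not defined here: `nn` appears to be an alias for `lasagne`, and `layers` a project module providing `NormalisationLayer` and `CumSumLayer`. As a hedged sketch only (the project's real definitions may differ, e.g. in the default axis), the two custom layers can be read as: re-normalise the dropped-out 600-way softmax so the bins sum to one again, then take a running sum so the output is a monotone, CDF-like vector over those bins.

# Hypothetical re-implementations of the custom layers used above; the
# project's own `layers` module may differ.
import theano.tensor as T
import lasagne


class NormalisationLayer(lasagne.layers.Layer):
    """Rescale activations so they sum to one along `axis` (per sample)."""

    def __init__(self, incoming, axis=1, **kwargs):
        super(NormalisationLayer, self).__init__(incoming, **kwargs)
        self.axis = axis

    def get_output_for(self, input, **kwargs):
        return input / T.sum(input, axis=self.axis, keepdims=True)


class CumSumLayer(lasagne.layers.Layer):
    """Cumulative sum along `axis`; turns a normalised histogram into a CDF."""

    def __init__(self, incoming, axis=1, **kwargs):
        super(CumSumLayer, self).__init__(incoming, **kwargs)
        self.axis = axis

    def get_output_for(self, input, **kwargs):
        return T.extra_ops.cumsum(input, axis=self.axis)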
Example #2
def build_model(input_layer=None):

    #################
    # Regular model #
    #################
    input_size = data_sizes["sliced:data:singleslice"]

    if input_layer:
        l0 = input_layer
    else:
        l0 = nn.layers.InputLayer(input_size)

    l1 = jonas_highway(l0,
                       num_filters=64,
                       num_conv=2,
                       filter_size=(3, 3),
                       pool_size=(2, 2))
    l2 = jonas_highway(l1,
                       num_filters=128,
                       num_conv=2,
                       filter_size=(3, 3),
                       pool_size=(2, 2))
    l3 = jonas_highway(l2,
                       num_filters=256,
                       num_conv=3,
                       filter_size=(3, 3),
                       pool_size=(2, 2))
    l4 = jonas_highway(l3,
                       num_filters=512,
                       num_conv=3,
                       filter_size=(3, 3),
                       pool_size=(2, 2))
    l5 = jonas_highway(l4,
                       num_filters=512,
                       num_conv=3,
                       filter_size=(3, 3),
                       pool_size=(2, 2))

    # Systole Dense layers
    ldsys1 = nn.layers.DenseLayer(l5,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    ldsys1drop = nn.layers.dropout(ldsys1, p=0.5)
    ldsys2 = nn.layers.DenseLayer(ldsys1drop,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    ldsys2drop = nn.layers.dropout(ldsys2, p=0.5)
    ldsys3 = nn.layers.DenseLayer(ldsys2drop,
                                  num_units=600,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.softmax)

    ldsys3drop = nn.layers.dropout(
        ldsys3, p=0.5
    )  # dropout at the output might encourage adjacent neurons to correlate
    ldsys3dropnorm = layers.NormalisationLayer(ldsys3drop)
    l_systole = layers.CumSumLayer(ldsys3dropnorm)

    # Diastole Dense layers
    lddia1 = nn.layers.DenseLayer(l5,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    lddia1drop = nn.layers.dropout(lddia1, p=0.5)
    lddia2 = nn.layers.DenseLayer(lddia1drop,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    lddia2drop = nn.layers.dropout(lddia2, p=0.5)
    lddia3 = nn.layers.DenseLayer(lddia2drop,
                                  num_units=600,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.softmax)

    lddia3drop = nn.layers.dropout(
        lddia3, p=0.5
    )  # dropout at the output might encourage adjacent neurons to correlate
    lddia3dropnorm = layers.NormalisationLayer(lddia3drop)
    l_diastole = layers.CumSumLayer(lddia3dropnorm)

    return {
        "inputs": {
            "sliced:data:singleslice": l0
        },
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        },
        "regularizable": {
            ldsys1: l2_weight,
            ldsys2: l2_weight,
            ldsys3: l2_weight_out,
            lddia1: l2_weight,
            lddia2: l2_weight,
            lddia3: l2_weight_out,
        },
        "meta_outputs": {
            "systole": ldsys2,
            "diastole": lddia2,
        }
    }
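
The dictionary returned by `build_model` is presumably consumed by a separate training script that is not part of these examples. As an illustrative sketch only (the variable names below are assumptions, and the module-level globals such as `data_sizes`, `l2_weight` and `l2_weight_out` must already be defined), the `outputs` entries can be turned into Theano expressions with `lasagne.layers.get_output`, and the `regularizable` mapping plugs directly into Lasagne's weighted L2 penalty helper.

# Hypothetical consumer of the dictionary returned by build_model();
# the project's actual training loop is not shown in these examples.
import lasagne

model = build_model()

# Symbolic predictions for both targets; deterministic=False keeps the
# dropout layers active, as during training.
systole_pred = lasagne.layers.get_output(model["outputs"]["systole"],
                                         deterministic=False)
diastole_pred = lasagne.layers.get_output(model["outputs"]["diastole"],
                                          deterministic=False)

# Weighted L2 penalty using the per-layer coefficients stored under
# "regularizable".
l2_penalty = lasagne.regularization.regularize_layer_params_weighted(
    model["regularizable"], lasagne.regularization.l2)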
Example #3
def build_model():

    #################
    # Regular model #
    #################
    input_key = "sliced:data:singleslice:difference"
    input_size = data_sizes[input_key]

    l0 = InputLayer(input_size)
    # add channel layer
    # l0r = reshape(l0, (-1, 1, ) + input_size[1:])

    # (batch, channel, time, x, y)

    l0_n = batch_norm(l0)

    l = jonas_highway(
        l0_n,
        num_filters=64,
        filter_size=(3, 3),
        axis=(2, 3),
        channel=1,
        W=lasagne.init.Orthogonal(),
        b=lasagne.init.Constant(0.1),
    )
    l = batch_norm(l)

    l = jonas_highway(
        l,
        num_filters=64,
        filter_size=(3, 3),
        axis=(2, 3),
        channel=1,
        W=lasagne.init.Orthogonal(),
        b=lasagne.init.Constant(0.1),
    )
    l = batch_norm(l)

    l = jonas_highway(
        l,
        num_filters=64,
        filter_size=(3, 3),
        num_conv=2,
        axis=(2, 3),
        channel=1,
        W=lasagne.init.Orthogonal(),
        b=lasagne.init.Constant(0.1),
    )
    l = batch_norm(l)

    l_dense = lasagne.layers.DenseLayer(
        lasagne.layers.DropoutLayer(l),
        num_units=600,
        nonlinearity=lasagne.nonlinearities.softmax)

    l_systole = CumSumLayer(l_dense)

    #===================================================================================

    l = jonas_highway(
        l0_n,
        num_filters=64,
        filter_size=(3, 3),
        axis=(2, 3),
        channel=1,
        W=lasagne.init.Orthogonal(),
        b=lasagne.init.Constant(0.1),
    )
    l = batch_norm(l)

    l = jonas_highway(
        l,
        num_filters=64,
        filter_size=(3, 3),
        axis=(2, 3),
        channel=1,
        W=lasagne.init.Orthogonal(),
        b=lasagne.init.Constant(0.1),
    )
    l = batch_norm(l)

    l = jonas_highway(
        l,
        num_filters=64,
        filter_size=(3, 3),
        num_conv=2,
        axis=(2, 3),
        channel=1,
        W=lasagne.init.Orthogonal(),
        b=lasagne.init.Constant(0.1),
    )
    l = batch_norm(l)

    l_dense = lasagne.layers.DenseLayer(
        lasagne.layers.DropoutLayer(l),
        num_units=600,
        nonlinearity=lasagne.nonlinearities.softmax)

    l_diastole = CumSumLayer(l_dense)

    return {
        "inputs": {
            input_key: l0
        },
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        }
    }
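
`jonas_highway` is another project-specific block that none of these examples define. The sketch below is only an assumption about its structure: a stack of `num_conv` highway-gated convolutions (out = H(x)*T(x) + x*(1 - T(x))) optionally followed by max-pooling, written for plain 2-D feature maps. The real block may well differ, and this simplification ignores the `axis` and `channel` arguments used in Examples #3 and #4.

# Hypothetical stand-in for jonas_highway, named differently to make clear
# it is a sketch rather than the project's actual implementation.
import theano.tensor as T
import lasagne
from lasagne.layers import (Conv2DLayer, MaxPool2DLayer, NonlinearityLayer,
                            ElemwiseMergeLayer, ElemwiseSumLayer)


def highway_conv_block(incoming, num_filters, num_conv=1, filter_size=(3, 3),
                       pool_size=None, W=lasagne.init.Orthogonal(),
                       b=lasagne.init.Constant(0.1), **kwargs):
    l = incoming
    # Project to num_filters channels so the carry connection matches shapes.
    if l.output_shape[1] != num_filters:
        l = Conv2DLayer(l, num_filters=num_filters, filter_size=(1, 1),
                        W=W, b=b, nonlinearity=lasagne.nonlinearities.rectify)
    for _ in range(num_conv):
        # Transform path H(x) and gate T(x); the gate bias starts negative so
        # the block initially behaves like an identity (carry) connection.
        h = Conv2DLayer(l, num_filters=num_filters, filter_size=filter_size,
                        pad="same", W=W, b=b,
                        nonlinearity=lasagne.nonlinearities.rectify)
        t = Conv2DLayer(l, num_filters=num_filters, filter_size=filter_size,
                        pad="same", W=W, b=lasagne.init.Constant(-2.0),
                        nonlinearity=lasagne.nonlinearities.sigmoid)
        # out = H(x) * T(x) + x * (1 - T(x))
        transform = ElemwiseMergeLayer([h, t], T.mul)
        carry_gate = NonlinearityLayer(t, nonlinearity=lambda g: 1.0 - g)
        carry = ElemwiseMergeLayer([l, carry_gate], T.mul)
        l = ElemwiseSumLayer([transform, carry])
    if pool_size is not None:
        l = MaxPool2DLayer(l, pool_size=pool_size)
    return l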
Example #4
def build_model():

    #################
    # Regular model #
    #################
    input_key = "sliced:data:singleslice:difference"
    input_size = data_sizes[input_key]

    l0 = InputLayer(input_size)
    # add channel layer
    # l0r = reshape(l0, (-1, 1, ) + input_size[1:])

    # (batch, channel, time, x, y)

    l0_n = batch_norm(l0)

    l = jonas_highway(l0_n, num_filters=64, filter_size=(3, 3),
                      axis=(2, 3), channel=1,
                      W=lasagne.init.Orthogonal(),
                      b=lasagne.init.Constant(0.1),
                      )
    l = batch_norm(l)

    l = jonas_highway(l, num_filters=64, filter_size=(3, 3),
                      axis=(2, 3), channel=1,
                      W=lasagne.init.Orthogonal(),
                      b=lasagne.init.Constant(0.1),
                      )
    l = batch_norm(l)

    l = jonas_highway(l, num_filters=64, filter_size=(3, 3),
                      num_conv=2,
                      axis=(2, 3), channel=1,
                      W=lasagne.init.Orthogonal(),
                      b=lasagne.init.Constant(0.1),
                      )
    l = batch_norm(l)

    l_dense = lasagne.layers.DenseLayer(lasagne.layers.DropoutLayer(l),
                                        num_units=600,
                                        nonlinearity=lasagne.nonlinearities.softmax)

    l_systole = CumSumLayer(l_dense)

    #===================================================================================


    l = jonas_highway(l0_n, num_filters=64, filter_size=(3, 3),
                      axis=(2, 3), channel=1,
                      W=lasagne.init.Orthogonal(),
                      b=lasagne.init.Constant(0.1),
                      )
    l = batch_norm(l)

    l = jonas_highway(l, num_filters=64, filter_size=(3, 3),
                      axis=(2, 3), channel=1,
                      W=lasagne.init.Orthogonal(),
                      b=lasagne.init.Constant(0.1),
                      )
    l = batch_norm(l)

    l = jonas_highway(l, num_filters=64, filter_size=(3, 3),
                      num_conv=2,
                      axis=(2, 3), channel=1,
                      W=lasagne.init.Orthogonal(),
                      b=lasagne.init.Constant(0.1),
                      )
    l = batch_norm(l)

    l_dense = lasagne.layers.DenseLayer(lasagne.layers.DropoutLayer(l),
                                        num_units=600,
                                        nonlinearity=lasagne.nonlinearities.softmax)

    l_diastole = CumSumLayer(l_dense)

    return {
        "inputs":{
            input_key: l0
        },
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        }
    }
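
All four models end in a 600-unit softmax whose output is (after re-normalisation in the first two examples) passed through a cumulative sum, so each prediction is a discrete CDF over 600 bins. Presumably this is to allow a CRPS-style squared-error score against a step-function target; the NumPy sketch below illustrates such a score, with the interpretation of bin i as the threshold 'value <= i' being an assumption rather than something stated in the examples.

# Hypothetical CRPS-style score for a predicted 600-bin CDF against a
# scalar target; not part of the original examples.
import numpy as np


def crps(predicted_cdf, true_value):
    thresholds = np.arange(len(predicted_cdf))               # 0, 1, ..., 599
    target_cdf = (thresholds >= true_value).astype(float)    # step at target
    return np.mean((predicted_cdf - target_cdf) ** 2)


# A perfectly sharp prediction at the true value scores 0.
sharp_cdf = (np.arange(600) >= 250).astype(float)
print(crps(sharp_cdf, 250))  # 0.0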