Code example #1
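All five examples are Lasagne model-configuration files (apparently from the Keesiu/meta-kaggle project; see example #5). They depend on module-level context that is not shown here: nn is presumably "import lasagne as nn", layers is a project-local module of custom layers, and data_sizes, l2_weight, l2_weight_out, and nr_slices are globals defined by the surrounding configuration framework.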
def build_model():

    #################
    # Regular model #
    #################
    input_size = data_sizes["sliced:data:singleslice:4ch"]

    l0 = nn.layers.InputLayer(input_size)

    l1a = nn.layers.dnn.Conv2DDNNLayer(l0,  W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=64, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l1b = nn.layers.dnn.Conv2DDNNLayer(l1a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=64, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l1 = nn.layers.dnn.MaxPool2DDNNLayer(l1b, pool_size=(2,2), stride=(2,2))

    l2a = nn.layers.dnn.Conv2DDNNLayer(l1,  W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=128, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l2b = nn.layers.dnn.Conv2DDNNLayer(l2a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=128, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l2 = nn.layers.dnn.MaxPool2DDNNLayer(l2b, pool_size=(2,2), stride=(2,2))

    l3a = nn.layers.dnn.Conv2DDNNLayer(l2,  W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l3b = nn.layers.dnn.Conv2DDNNLayer(l3a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l3c = nn.layers.dnn.Conv2DDNNLayer(l3b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=256, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l3 = nn.layers.dnn.MaxPool2DDNNLayer(l3c, pool_size=(2,2), stride=(2,2))

    l4a = nn.layers.dnn.Conv2DDNNLayer(l3,  W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l4b = nn.layers.dnn.Conv2DDNNLayer(l4a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l4c = nn.layers.dnn.Conv2DDNNLayer(l4b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l4 = nn.layers.dnn.MaxPool2DDNNLayer(l4c, pool_size=(2,2), stride=(2,2))

    l5a = nn.layers.dnn.Conv2DDNNLayer(l4,  W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l5b = nn.layers.dnn.Conv2DDNNLayer(l5a, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l5c = nn.layers.dnn.Conv2DDNNLayer(l5b, W=nn.init.Orthogonal("relu"), filter_size=(3,3), num_filters=512, stride=(1,1), pad="same", nonlinearity=nn.nonlinearities.rectify)
    l5 = nn.layers.dnn.MaxPool2DDNNLayer(l5c, pool_size=(2,2), stride=(2,2))

    # Systole Dense layers
    ldsys1 = nn.layers.DenseLayer(l5, num_units=512, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)

    ldsys1drop = nn.layers.dropout(ldsys1, p=0.5)
    ldsys2 = nn.layers.DenseLayer(ldsys1drop, num_units=512, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)

    ldsys2drop = nn.layers.dropout(ldsys2, p=0.5)
    ldsys3 = nn.layers.DenseLayer(ldsys2drop, num_units=600, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.softmax)

    ldsys3drop = nn.layers.dropout(ldsys3, p=0.5)  # dropout at the output might encourage adjacent neurons to correlate
    ldsys3dropnorm = layers.NormalisationLayer(ldsys3drop)
    l_systole = layers.CumSumLayer(ldsys3dropnorm)

    # Diastole Dense layers
    lddia1 = nn.layers.DenseLayer(l5, num_units=512, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)

    lddia1drop = nn.layers.dropout(lddia1, p=0.5)
    lddia2 = nn.layers.DenseLayer(lddia1drop, num_units=512, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.rectify)

    lddia2drop = nn.layers.dropout(lddia2, p=0.5)
    lddia3 = nn.layers.DenseLayer(lddia2drop, num_units=600, W=nn.init.Orthogonal("relu"), b=nn.init.Constant(0.1), nonlinearity=nn.nonlinearities.softmax)

    lddia3drop = nn.layers.dropout(lddia3, p=0.5)  # dropout at the output might encourage adjacent neurons to correlate
    lddia3dropnorm = layers.NormalisationLayer(lddia3drop)
    l_diastole = layers.CumSumLayer(lddia3dropnorm)


    return {
        "inputs":{
            "sliced:data:singleslice:4ch": l0
        },
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        },
        "regularizable": {
            ldsys1: l2_weight,
            ldsys2: l2_weight,
            ldsys3: l2_weight_out,
            lddia1: l2_weight,
            lddia2: l2_weight,
            lddia3: l2_weight_out,
        },
    }
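
layers.NormalisationLayer and layers.CumSumLayer are custom layers from the project-local layers module, not part of Lasagne. A minimal sketch of what they plausibly implement, inferred only from how they are used above (the 600-way softmax is renormalised after dropout, then accumulated into a monotone CDF over the 600 output bins); the real definitions may differ:

import theano.tensor as T
import lasagne as nn

class NormalisationLayer(nn.layers.Layer):
    # Rescale each row to sum to one again (after dropout the
    # softmax output no longer sums to exactly 1).
    def get_output_for(self, input, **kwargs):
        return input / T.sum(input, axis=1, keepdims=True)

class CumSumLayer(nn.layers.Layer):
    # Running sum over the bins: turns a probability vector into a
    # monotone, [0, 1]-valued cumulative distribution function.
    def get_output_for(self, input, **kwargs):
        return T.cumsum(input, axis=1)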
Code example #2
def build_model(input_layer=None):

    #################
    # Regular model #
    #################
    input_size = data_sizes["sliced:data:singleslice"]

    if input_layer:
        l0 = input_layer
    else:
        l0 = nn.layers.InputLayer(input_size)

    l1 = jonas_residual(l0,
                        num_filters=64,
                        num_conv=2,
                        filter_size=(3, 3),
                        pool_size=(2, 2))
    l2 = jonas_residual(l1,
                        num_filters=128,
                        num_conv=2,
                        filter_size=(3, 3),
                        pool_size=(2, 2))
    l3 = jonas_residual(l2,
                        num_filters=256,
                        num_conv=3,
                        filter_size=(3, 3),
                        pool_size=(2, 2))
    l4 = jonas_residual(l3,
                        num_filters=512,
                        num_conv=3,
                        filter_size=(3, 3),
                        pool_size=(2, 2))
    l5 = jonas_residual(l4,
                        num_filters=512,
                        num_conv=3,
                        filter_size=(3, 3),
                        pool_size=(2, 2))

    # Systole Dense layers
    ldsys1 = nn.layers.DenseLayer(l5,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    ldsys1drop = nn.layers.dropout(ldsys1, p=0.5)
    ldsys2 = nn.layers.DenseLayer(ldsys1drop,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    ldsys2drop = nn.layers.dropout(ldsys2, p=0.5)
    ldsys3 = nn.layers.DenseLayer(ldsys2drop,
                                  num_units=600,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.softmax)

    ldsys3drop = nn.layers.dropout(
        ldsys3, p=0.5
    )  # dropout at the output might encourage adjacent neurons to correlate
    ldsys3dropnorm = layers.NormalisationLayer(ldsys3drop)
    l_systole = layers.CumSumLayer(ldsys3dropnorm)

    # Diastole Dense layers
    lddia1 = nn.layers.DenseLayer(l5,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    lddia1drop = nn.layers.dropout(lddia1, p=0.5)
    lddia2 = nn.layers.DenseLayer(lddia1drop,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    lddia2drop = nn.layers.dropout(lddia2, p=0.5)
    lddia3 = nn.layers.DenseLayer(lddia2drop,
                                  num_units=600,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.softmax)

    lddia3drop = nn.layers.dropout(
        lddia3, p=0.5
    )  # dropout at the output might encourage adjacent neurons to correlate
    lddia3dropnorm = layers.NormalisationLayer(lddia3drop)
    l_diastole = layers.CumSumLayer(lddia3dropnorm)

    return {
        "inputs": {
            "sliced:data:singleslice": l0
        },
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        },
        "regularizable": {
            ldsys1: l2_weight,
            ldsys2: l2_weight,
            ldsys3: l2_weight_out,
            lddia1: l2_weight,
            lddia2: l2_weight,
            lddia3: l2_weight_out,
        },
        "meta_outputs": {
            "systole": ldsys2,
            "diastole": lddia2,
        }
    }
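
jonas_residual is another project-local helper whose definition is not shown. A hypothetical reconstruction that is consistent with the call sites above (num_conv same-padded convolutions, an element-wise sum with a 1x1-projected shortcut, then max-pooling); everything beyond the signature is an assumption:

import lasagne as nn

def jonas_residual(incoming, num_filters, num_conv, filter_size, pool_size):
    # Hypothetical sketch; the project's real helper may differ.
    l = incoming
    for _ in range(num_conv):
        l = nn.layers.dnn.Conv2DDNNLayer(l,
                                         num_filters=num_filters,
                                         filter_size=filter_size,
                                         stride=(1, 1),
                                         pad="same",
                                         W=nn.init.Orthogonal("relu"),
                                         nonlinearity=nn.nonlinearities.rectify)
    # 1x1 projection so the shortcut matches the channel count.
    shortcut = nn.layers.dnn.Conv2DDNNLayer(incoming,
                                            num_filters=num_filters,
                                            filter_size=(1, 1),
                                            b=None,
                                            nonlinearity=None)
    l = nn.layers.ElemwiseSumLayer([l, shortcut])
    return nn.layers.dnn.MaxPool2DDNNLayer(l, pool_size=pool_size, stride=pool_size)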
Code example #3
def build_model():

    #################
    # Regular model #
    #################
    input_size = data_sizes["sliced:data:sax"]
    input_size_mask = data_sizes["sliced:data:sax:is_not_padded"]
    input_size_locations = data_sizes["sliced:data:sax:locations"]

    l0 = nn.layers.InputLayer(input_size)
    lin_slice_mask = nn.layers.InputLayer(input_size_mask)
    lin_slice_locations = nn.layers.InputLayer(input_size_locations)

    # PREPROCESS SLICES SEPARATELY
    # Convolutional layers and some dense layers are defined in a submodel
    l0_slices = nn.layers.ReshapeLayer(l0, (-1, [2], [3], [4]))

    from . import je_ss_jonisc64small_360
    submodel = je_ss_jonisc64small_360.build_model(l0_slices)

    # Systole Dense layers
    ldsysout = submodel["outputs"]["systole"]
    # Diastole Dense layers
    lddiaout = submodel["outputs"]["diastole"]

    # AGGREGATE SLICES PER PATIENT
    # Systole
    ldsys_pat_in = nn.layers.ReshapeLayer(ldsysout, (-1, nr_slices, [1]))

    ldsys_rnn = rnn_layer(ldsys_pat_in,
                          num_units=256,
                          mask_input=lin_slice_mask)

    #    ldsys_rnn_drop = nn.layers.dropout(ldsys_rnn, p=0.5)

    ldsys3 = nn.layers.DenseLayer(ldsys_rnn,
                                  num_units=600,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.softmax)
    ldsys3drop = nn.layers.dropout(
        ldsys3, p=0.5
    )  # dropout at the output might encourage adjacent neurons to correlate
    ldsys3dropnorm = layers.NormalisationLayer(ldsys3drop)
    l_systole = layers.CumSumLayer(ldsys3dropnorm)

    # Diastole
    lddia_pat_in = nn.layers.ReshapeLayer(lddiaout, (-1, nr_slices, [1]))

    lddia_rnn = rnn_layer(lddia_pat_in,
                          num_units=256,
                          mask_input=lin_slice_mask)

    #    lddia_rnn_drop = nn.layers.dropout(lddia_rnn, p=0.5)
    lddia3 = nn.layers.DenseLayer(lddia_rnn,
                                  num_units=600,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.softmax)
    lddia3drop = nn.layers.dropout(
        lddia3, p=0.5
    )  # dropout at the output might encourage adjacent neurons to correlate
    lddia3dropnorm = layers.NormalisationLayer(lddia3drop)
    l_diastole = layers.CumSumLayer(lddia3dropnorm)

    submodels = [submodel]
    return {
        "inputs": {
            "sliced:data:sax": l0,
            "sliced:data:sax:is_not_padded": lin_slice_mask,
            "sliced:data:sax:locations": lin_slice_locations,
        },
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        },
        "regularizable":
        dict({
            lddia3: l2_weight_out,
            ldsys3: l2_weight_out,
        }, **{
            k: v
            for d in [
                model["regularizable"] for model in submodels
                if "regularizable" in model
            ] for k, v in list(d.items())
        }),
        "pretrained": {
            je_ss_jonisc64small_360.__name__: submodel["outputs"],
        }
    }
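
rnn_layer aggregates the per-slice predictions into a single prediction per patient; its definition is also project-local. One plausible sketch, using Lasagne's GRULayer driven by the is_not_padded mask and keeping only the final hidden state (the choice of GRU, like the function body itself, is an assumption):

import lasagne as nn

def rnn_layer(incoming, num_units, mask_input):
    # Hypothetical sketch: a recurrence over the slice axis that
    # skips padded slices via mask_input and returns the last state.
    return nn.layers.GRULayer(incoming,
                              num_units=num_units,
                              mask_input=mask_input,
                              only_return_final=True)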
Code example #4
def build_model():

    #################
    # Regular model #
    #################
    input_size_age = data_sizes["sliced:meta:PatientAge"]
    input_size_sex = data_sizes["sliced:meta:PatientSex"]

    l0_age = nn.layers.InputLayer(input_size_age)
    l0_sex = nn.layers.InputLayer(input_size_sex)
    l0 = nn.layers.ConcatLayer([l0_age, l0_sex], axis=1)

    # Systole Dense layers
    ldsys1 = nn.layers.DenseLayer(l0,
                                  num_units=128,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    ldsys1drop = nn.layers.dropout(ldsys1, p=0.5)
    ldsys2 = nn.layers.DenseLayer(ldsys1drop,
                                  num_units=128,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    ldsys2drop = nn.layers.dropout(ldsys2, p=0.5)
    ldsys3 = nn.layers.DenseLayer(ldsys2drop,
                                  num_units=600,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.softmax)

    ldsys3drop = nn.layers.dropout(
        ldsys3, p=0.9
    )  # dropout at the output might encourage adjacent neurons to correlate
    ldsys3dropnorm = layers.NormalisationLayer(ldsys3drop)
    l_systole = layers.CumSumLayer(ldsys3dropnorm)

    # Diastole Dense layers
    lddia1 = nn.layers.DenseLayer(l0,
                                  num_units=128,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    lddia1drop = nn.layers.dropout(lddia1, p=0.5)
    lddia2 = nn.layers.DenseLayer(lddia1drop,
                                  num_units=128,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    lddia2drop = nn.layers.dropout(lddia2, p=0.5)
    lddia3 = nn.layers.DenseLayer(lddia2drop,
                                  num_units=600,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.softmax)

    lddia3drop = nn.layers.dropout(
        lddia3, p=0.9
    )  # dropout at the output might encourage adjacent neurons to correlate
    lddia3dropnorm = layers.NormalisationLayer(lddia3drop)
    l_diastole = layers.CumSumLayer(lddia3dropnorm)

    return {
        "inputs": {
            "sliced:meta:PatientAge": l0_age,
            "sliced:meta:PatientSex": l0_sex,
        },
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        },
        "regularizable": {
            ldsys1: l2_weight,
            ldsys2: l2_weight,
            ldsys3: l2_weight_out,
            lddia1: l2_weight,
            lddia2: l2_weight,
            lddia3: l2_weight_out,
        },
    }
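
The {layer: coefficient} dicts returned under "regularizable" have exactly the format expected by Lasagne's weighted penalty helper, so the training code presumably applies them along these lines. A self-contained sketch; the toy layers and coefficient values are placeholders:

import lasagne as nn
from lasagne.regularization import regularize_layer_params_weighted, l2

l2_weight, l2_weight_out = 5e-4, 5e-4  # placeholder coefficients

# Toy stand-in for the dict a build_model() above returns.
l_in = nn.layers.InputLayer((None, 10))
l_hid = nn.layers.DenseLayer(l_in, num_units=8)
l_out = nn.layers.DenseLayer(l_hid, num_units=3)
reg_dict = {l_hid: l2_weight, l_out: l2_weight_out}

# Scalar Theano expression to be added to the training objective.
l2_penalty = regularize_layer_params_weighted(reg_dict, l2)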
Code example #5
File: j7_meta.py  Project: Keesiu/meta-kaggle
def build_model():

    # import here, so that our global variables are not overridden!
    from . import j6_2ch_128mm, j6_4ch

    meta_2ch = j6_2ch_128mm.build_model()
    meta_4ch = j6_4ch.build_model()

    l_age = nn.layers.InputLayer(data_sizes["sliced:meta:PatientAge"])
    l_sex = nn.layers.InputLayer(data_sizes["sliced:meta:PatientSex"])

    l_meta_2ch_systole = meta_2ch["meta_outputs"]["systole"]
    l_meta_2ch_diastole = meta_2ch["meta_outputs"]["diastole"]

    l_meta_4ch_systole = meta_4ch["meta_outputs"]["systole"]
    l_meta_4ch_diastole = meta_4ch["meta_outputs"]["diastole"]

    l_meta_systole = nn.layers.ConcatLayer(
        [l_age, l_sex, l_meta_2ch_systole, l_meta_4ch_systole])
    l_meta_diastole = nn.layers.ConcatLayer(
        [l_age, l_sex, l_meta_2ch_diastole, l_meta_4ch_diastole])

    ldsys1 = nn.layers.DenseLayer(l_meta_systole,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    ldsys1drop = nn.layers.dropout(ldsys1, p=0.5)
    ldsys2 = nn.layers.DenseLayer(ldsys1drop,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    ldsys2drop = nn.layers.dropout(ldsys2, p=0.5)
    ldsys3 = nn.layers.DenseLayer(ldsys2drop,
                                  num_units=600,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.softmax)

    ldsys3drop = nn.layers.dropout(
        ldsys3, p=0.5
    )  # dropout at the output might encourage adjacent neurons to correlate
    ldsys3dropnorm = layers.NormalisationLayer(ldsys3drop)
    l_systole = layers.CumSumLayer(ldsys3dropnorm)

    lddia1 = nn.layers.DenseLayer(l_meta_diastole,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    lddia1drop = nn.layers.dropout(lddia1, p=0.5)
    lddia2 = nn.layers.DenseLayer(lddia1drop,
                                  num_units=512,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.rectify)

    lddia2drop = nn.layers.dropout(lddia2, p=0.5)
    lddia3 = nn.layers.DenseLayer(lddia2drop,
                                  num_units=600,
                                  W=nn.init.Orthogonal("relu"),
                                  b=nn.init.Constant(0.1),
                                  nonlinearity=nn.nonlinearities.softmax)

    lddia3drop = nn.layers.dropout(
        lddia3, p=0.5
    )  # dropout at the output might encourage adjacent neurons to correlate
    lddia3dropnorm = layers.NormalisationLayer(lddia3drop)
    l_diastole = layers.CumSumLayer(lddia3dropnorm)

    submodels = [meta_2ch, meta_4ch]
    return {
        "inputs":
        dict(
            {
                "sliced:meta:PatientAge": l_age,
                "sliced:meta:PatientSex": l_sex,
            }, **{
                k: v
                for d in [model["inputs"] for model in submodels]
                for k, v in list(d.items())
            }),
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        },
        "regularizable":
        dict({}, **{
            k: v
            for d in [
                model["regularizable"] for model in submodels
                if "regularizable" in model
            ] for k, v in list(d.items())
        }),
        "pretrained": {
            j6_2ch_128mm.__name__: meta_2ch["outputs"],
            j6_4ch.__name__: meta_4ch["outputs"],
        },
        "cutoff_gradients": [] + [
            v for d in [
                model["meta_outputs"]
                for model in submodels if "meta_outputs" in model
            ] for v in list(d.values())
        ]
    }
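
The nested dict(..., **{k: v for d in [...] for k, v in ...}) merges used above for "inputs" and "regularizable" are equivalent to a plain update loop; a small sketch of the same idiom written out, purely for readability:

def merge_submodel_dicts(base, submodels, key):
    # Fold each submodel's `key` dict into a copy of `base`;
    # later submodels win on key collisions, as in the idiom above.
    merged = dict(base)
    for model in submodels:
        merged.update(model.get(key, {}))
    return merged

# e.g. the "regularizable" entry of this example is equivalent to
#   merge_submodel_dicts({}, submodels, "regularizable")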