Example #1
def ResBlock(layer, filters, nconv, half):
    # Basic ResNet block: `nconv` 3x3 convolutions plus a skip connection.
    # `BG` is a batch-norm helper defined elsewhere in the original example.
    in_ = layer
    # When `half` is set, the first convolution downsamples by a factor of 2.
    strides = [2, 2] if half else [1, 1]
    layer = eddl.ReLu(BG(eddl.Conv(layer, filters, [3, 3], strides)))
    for _ in range(nconv - 1):
        layer = eddl.ReLu(BG(eddl.Conv(layer, filters, [3, 3], [1, 1])))
    if half:
        # Project the input with a strided 1x1 convolution so its shape
        # matches the downsampled branch before the addition.
        return eddl.Add(BG(eddl.Conv(in_, filters, [1, 1], [2, 2])), layer)
    return eddl.Add(layer, in_)
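A minimal sketch of how this block might be stacked, assuming a CIFAR-sized input and that `BG` is the batch-norm helper the snippet relies on (the shapes and filter counts below are illustrative, not from the original):

in_ = eddl.Input([3, 32, 32])         # hypothetical CIFAR-like input
x = ResBlock(in_, 64, 2, half=False)  # two 3x3 convs, identity shortcut
x = ResBlock(x, 128, 2, half=True)    # downsample and widen to 128 filters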
Example #2
def UNetWithPadding(layer):
    # U-Net encoder/decoder built with "same" padding. `LBC` (a Conv +
    # BatchNorm + activation helper) and the `USE_CONCAT` flag are defined
    # elsewhere in the original example; skip connections are concatenated
    # or added depending on that flag.
    x = layer
    depth = 32

    # Encoder: two LBC blocks per level, halving the resolution with
    # MaxPool and doubling the channel count (up to 8*depth).
    x = LBC(x, depth, [3, 3], [1, 1], "same")
    x = LBC(x, depth, [3, 3], [1, 1], "same")
    x2 = eddl.MaxPool(x, [2, 2], [2, 2])
    x2 = LBC(x2, 2*depth, [3, 3], [1, 1], "same")
    x2 = LBC(x2, 2*depth, [3, 3], [1, 1], "same")
    x3 = eddl.MaxPool(x2, [2, 2], [2, 2])
    x3 = LBC(x3, 4*depth, [3, 3], [1, 1], "same")
    x3 = LBC(x3, 4*depth, [3, 3], [1, 1], "same")
    x4 = eddl.MaxPool(x3, [2, 2], [2, 2])
    x4 = LBC(x4, 8*depth, [3, 3], [1, 1], "same")
    x4 = LBC(x4, 8*depth, [3, 3], [1, 1], "same")
    x5 = eddl.MaxPool(x4, [2, 2], [2, 2])

    # Bottleneck, followed by the first upsampling step of the decoder.
    x5 = LBC(x5, 8*depth, [3, 3], [1, 1], "same")
    x5 = LBC(x5, 8*depth, [3, 3], [1, 1], "same")
    x5 = eddl.BatchNormalization(eddl.Conv(
        eddl.UpSampling(x5, [2, 2]), 8*depth, [3, 3], [1, 1], "same"
    ), True)

    # Decoder: merge each upsampled map with the matching encoder map,
    # refine with two LBC blocks, then upsample again.
    x4 = eddl.Concat([x4, x5]) if USE_CONCAT else eddl.Add([x4, x5])
    x4 = LBC(x4, 8*depth, [3, 3], [1, 1], "same")
    x4 = LBC(x4, 8*depth, [3, 3], [1, 1], "same")
    x4 = eddl.BatchNormalization(eddl.Conv(
        eddl.UpSampling(x4, [2, 2]), 4*depth, [3, 3], [1, 1], "same"
    ), True)

    x3 = eddl.Concat([x3, x4]) if USE_CONCAT else eddl.Add([x3, x4])
    x3 = LBC(x3, 4*depth, [3, 3], [1, 1], "same")
    x3 = LBC(x3, 4*depth, [3, 3], [1, 1], "same")
    # Note: unlike the other decoder stages, this upsampling convolution
    # is not batch-normalized in the original code.
    x3 = eddl.Conv(
        eddl.UpSampling(x3, [2, 2]), 2*depth, [3, 3], [1, 1], "same"
    )

    x2 = eddl.Concat([x2, x3]) if USE_CONCAT else eddl.Add([x2, x3])
    x2 = LBC(x2, 2*depth, [3, 3], [1, 1], "same")
    x2 = LBC(x2, 2*depth, [3, 3], [1, 1], "same")
    x2 = eddl.BatchNormalization(eddl.Conv(
        eddl.UpSampling(x2, [2, 2]), depth, [3, 3], [1, 1], "same"
    ), True)

    x = eddl.Concat([x, x2]) if USE_CONCAT else eddl.Add([x, x2])
    x = LBC(x, depth, [3, 3], [1, 1], "same")
    x = LBC(x, depth, [3, 3], [1, 1], "same")
    # Final 1x1 convolution maps down to a single output channel.
    x = eddl.BatchNormalization(eddl.Conv(x, 1, [1, 1]), True)

    return x
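A sketch of how the model might be instantiated, assuming a single-channel 512x512 input and a sigmoid head for binary segmentation (`LBC` and `USE_CONCAT` must be defined first; the shapes below are illustrative):

in_ = eddl.Input([1, 512, 512])  # hypothetical single-channel image
out = eddl.Sigmoid(UNetWithPadding(in_))
net = eddl.Model([in_], [out])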
Example #3
def resnet_block(l0, nf, bn, reps, downsample):
    # Stack of `reps` bottleneck blocks (1x1 -> 3x3 -> 1x1, expanding the
    # channel count to nf * 4). If `downsample` is set, the first block
    # halves the spatial resolution with stride 2.
    for i in range(reps):
        stri = 2 if (downsample and i == 0) else 1

        l1 = eddl.GlorotUniform(
            eddl.Conv(l0, nf, [1, 1], [stri, stri], "same", False))
        if bn:
            l1 = eddl.BatchNormalization(l1, 0.99, 0.001, True, "")
        l1 = eddl.ReLu(l1)

        l1 = eddl.GlorotUniform(
            eddl.Conv(l1, nf, [3, 3], [1, 1], "same", False))
        if bn:
            l1 = eddl.BatchNormalization(l1, 0.99, 0.001, True, "")
        l1 = eddl.ReLu(l1)

        l1 = eddl.GlorotUniform(
            eddl.Conv(l1, nf * 4, [1, 1], [1, 1], "same", False))
        if bn:
            l1 = eddl.BatchNormalization(l1, 0.99, 0.001, True, "")

        # Only the first repetition needs a shortcut projection: it matches
        # the shortcut's channel count (and stride) to the residual branch.
        if i == 0:
            l0 = eddl.GlorotUniform(
                eddl.Conv(l0, nf * 4, [1, 1], [stri, stri], "same", False))

        l0 = eddl.Add([l0, l1])
        l0 = eddl.ReLu(l0)

    return l0
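With the standard (3, 4, 6, 3) repetition counts, stacking this block gives a ResNet-50-style backbone; a minimal sketch, assuming an ImageNet-sized input and omitting the stem and classifier:

x = eddl.Input([3, 224, 224])            # hypothetical RGB input
x = resnet_block(x, 64, True, 3, False)  # conv2_x: no downsampling
x = resnet_block(x, 128, True, 4, True)  # conv3_x: stride-2 first block
x = resnet_block(x, 256, True, 6, True)  # conv4_x
x = resnet_block(x, 512, True, 3, True)  # conv5_x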
Example #4
def ResBlock(layer, filters, half, expand=0):
    # Bottleneck ResNet block: 1x1 -> 3x3 -> 1x1 with a 4x channel
    # expansion. `BG` is a batch-norm helper defined elsewhere in the
    # original example.
    in_ = layer
    layer = eddl.ReLu(
        BG(eddl.Conv(layer, filters, [1, 1], [1, 1], "same", False)))
    # When `half` is set, the 3x3 convolution downsamples by a factor of 2.
    strides = [2, 2] if half else [1, 1]
    layer = eddl.ReLu(
        BG(eddl.Conv(layer, filters, [3, 3], strides, "same", False)))
    layer = eddl.ReLu(
        BG(eddl.Conv(layer, 4 * filters, [1, 1], [1, 1], "same", False)))
    if half:
        # Strided 1x1 projection so the shortcut matches the downsampled
        # branch.
        return eddl.ReLu(
            eddl.Add(
                BG(eddl.Conv(in_, 4 * filters, [1, 1], [2, 2], "same", False)),
                layer))
    if expand:
        # Non-strided 1x1 projection: widens the shortcut to 4 * filters
        # channels where the input does not yet have that many.
        return eddl.ReLu(
            eddl.Add(
                BG(eddl.Conv(in_, 4 * filters, [1, 1], [1, 1], "same", False)),
                layer))
    return eddl.ReLu(eddl.Add(in_, layer))
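In a stack, `expand` is what lets the first block of a stage widen the shortcut without downsampling; a hypothetical sketch (input shape and filter counts are illustrative):

x = eddl.Input([3, 224, 224])              # hypothetical input
x = ResBlock(x, 64, half=False, expand=1)  # widen shortcut to 4 * 64 channels
x = ResBlock(x, 64, half=False)            # identity shortcut
x = ResBlock(x, 128, half=True)            # next stage: downsample and widen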
Example #5
def dice_loss_pixel(inputs):
    # Dice loss defined on the model graph: inputs[0] is the prediction
    # layer, inputs[1] the target layer. Only axis 0 is reduced, so one
    # loss value is kept per pixel (hence the name).
    num = eddl.Mult(2, eddl.ReduceSum(eddl.Mult(inputs[0], inputs[1]), [0]))
    den = eddl.ReduceSum(eddl.Add(inputs[0], inputs[1]), [0])
    # Add-one smoothing keeps the ratio defined when both maps are empty.
    num = eddl.Add(num, 1)
    den = eddl.Add(den, 1)
    return eddl.Sub(1.0, eddl.Div(num, den))
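Written out, dice_loss_pixel returns 1 - (2 * sum(p * q) + 1) / (sum(p + q) + 1), where p is the prediction, q the target, and the sums run over the reduced axis; the +1 terms keep the ratio defined when both maps are all zero.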
Example #6
def dice_loss_img(inputs):
    # Same dice loss, but reduced over axes [0, 1, 2], collapsing each
    # sample's map to a single value per image (hence the name). Note that
    # this variant adds no smoothing term.
    num = eddl.Mult(2, eddl.ReduceSum(
        eddl.Mult(inputs[0], inputs[1]), [0, 1, 2]
    ))
    den = eddl.ReduceSum(eddl.Add(inputs[0], inputs[1]), [0, 1, 2])
    return eddl.Sub(1.0, eddl.Div(num, den))
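Both dice functions take a list of layers and return a layer, which is the shape pyeddl expects for graph-defined losses; a sketch of how one might be registered and used in a custom training step, assuming `net`, `out`, `target`, and a `batch` tensor already exist:

dice = eddl.newloss(dice_loss_img, [out, target], "dice_loss_img")
eddl.zeroGrads(net)
eddl.forward(net, [batch])
eddl.backward(dice)   # backpropagate from the registered loss
eddl.update(net)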