Example no. 1
import pyeddl.eddl as eddl


def resnet_block(l0, nf, bn, reps, downsample):
    # Bottleneck residual block: reps repetitions of 1x1 -> 3x3 -> 1x1
    # convolutions, with a projection shortcut on the first repetition.
    for i in range(reps):
        # Downsample with stride 2 only in the first repetition, if requested.
        stri = 2 if (downsample and i == 0) else 1

        # 1x1 convolution that squeezes the channel count to nf.
        l1 = eddl.GlorotUniform(
            eddl.Conv(l0, nf, [1, 1], [stri, stri], "same", False))
        if bn:
            l1 = eddl.BatchNormalization(l1, 0.99, 0.001, True, "")
        l1 = eddl.ReLu(l1)

        # 3x3 convolution at the reduced width.
        l1 = eddl.GlorotUniform(
            eddl.Conv(l1, nf, [3, 3], [1, 1], "same", False))
        if bn:
            l1 = eddl.BatchNormalization(l1, 0.99, 0.001, True, "")
        l1 = eddl.ReLu(l1)

        # 1x1 convolution that expands the channels to nf * 4.
        l1 = eddl.GlorotUniform(
            eddl.Conv(l1, nf * 4, [1, 1], [1, 1], "same", False))
        if bn:
            l1 = eddl.BatchNormalization(l1, 0.99, 0.001, True, "")

        # Projection shortcut: match the main branch's shape on the
        # first repetition so the residual addition is well defined.
        if i == 0:
            l0 = eddl.GlorotUniform(
                eddl.Conv(l0, nf * 4, [1, 1], [stri, stri], "same", False))

        # Residual addition followed by the block's final activation.
        l0 = eddl.Add([l0, l1])
        l0 = eddl.ReLu(l0)

    return l0
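
Example no. 5 below uses this helper to assemble a full CIFAR-10 classifier, stacking calls such as resnet_block(l, 128, bn, 2, True) with increasing widths.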
Example no. 2
import pyeddl.eddl as eddl


def defblock(l, bn, nf, reps):
    # VGG-style block: reps 3x3 convolutions with nf filters, optionally
    # batch-normalized, followed by a 2x2 max pooling.
    for i in range(reps):
        l = eddl.GlorotUniform(eddl.Conv(l, nf, [3, 3]))
        if bn:
            l = eddl.BatchNormalization(l, 0.99, 0.001, True, "")
        l = eddl.ReLu(l)
    l = eddl.MaxPool(l, [2, 2], [2, 2], "valid")
    return l
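
Example no. 4 below chains this block with filter counts 64, 128, 256 and 512 to obtain a VGG16-like topology for CIFAR-10.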
Example no. 3
import pyeddl.eddl as eddl
from pyeddl.tensor import Tensor


def main(args):
    eddl.download_mnist()

    num_classes = 10

    # Flattened 28x28 MNIST images as input.
    in_ = eddl.Input([784])

    # Three 1024-unit hidden layers, each with a different weight initializer.
    layer = in_
    layer = eddl.ReLu(eddl.GlorotNormal(eddl.Dense(layer, 1024)))
    layer = eddl.ReLu(eddl.GlorotUniform(eddl.Dense(layer, 1024)))
    layer = eddl.ReLu(eddl.RandomNormal(eddl.Dense(layer, 1024)))
    out = eddl.Softmax(eddl.Dense(layer, num_classes))
    net = eddl.Model([in_], [out])

    # Compile the model for CPU or GPU according to the CLI arguments.
    eddl.build(
        net, eddl.sgd(0.01, 0.9), ["soft_cross_entropy"],
        ["categorical_accuracy"],
        eddl.CS_GPU(mem=args.mem) if args.gpu else eddl.CS_CPU(mem=args.mem))

    eddl.summary(net)
    eddl.plot(net, "model.pdf")

    # Load the MNIST tensors downloaded above.
    x_train = Tensor.load("mnist_trX.bin")
    y_train = Tensor.load("mnist_trY.bin")
    x_test = Tensor.load("mnist_tsX.bin")
    y_test = Tensor.load("mnist_tsY.bin")
    if args.small:
        # Optionally train on a reduced subset to speed things up.
        x_train = x_train.select([":6000"])
        y_train = y_train.select([":6000"])
        x_test = x_test.select([":1000"])
        y_test = y_test.select([":1000"])

    # Scale pixel values to [0, 1].
    x_train.div_(255.0)
    x_test.div_(255.0)

    eddl.fit(net, [x_train], [y_train], args.batch_size, args.epochs)
    eddl.evaluate(net, [x_test], [y_test], bs=args.batch_size)
    print("All done")
Example no. 4
batch_size = 100
num_classes = 10

bn = int(sys.argv[1]) == 1


inp = eddl.Input([3, 32, 32])
l = inp
l = defblock(l, bn, 64, 2)
l = defblock(l, bn, 128, 2)
l = defblock(l, bn, 256, 3)
l = defblock(l, bn, 512, 3)
l = defblock(l, bn, 512, 3)
l = eddl.Flatten(l)
for i in range(2):
    l = eddl.GlorotUniform(eddl.Dense(l, 4096))
    if(bn):
        l = eddl.BatchNormalization(l, 0.99, 0.001, True, "")
    l = eddl.ReLu(l)

out = eddl.Softmax(eddl.GlorotUniform(eddl.Dense(l, num_classes)))

net = eddl.Model([inp], [out])
eddl.plot(net, "model.pdf")

eddl.build(net,
    eddl.adam(0.0001),
    ["soft_cross_entropy"],
    ["categorical_accuracy"],
    eddl.CS_GPU() if gpu else eddl.CS_CPU()
)
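
The snippet stops right after eddl.build. Below is a minimal sketch of how training and evaluation could continue, assuming eddl.download_cifar10() has been called beforehand; the tensor file names and the epochs expression are borrowed from the ResNet example below and are assumptions here, not part of the original script.

from pyeddl.tensor import Tensor

# Assumed continuation: load the CIFAR-10 tensors fetched by
# eddl.download_cifar10(), normalize them, then train and evaluate.
x_train = Tensor.load("cifar_trX.bin")
y_train = Tensor.load("cifar_trY.bin")
x_test = Tensor.load("cifar_tsX.bin")
y_test = Tensor.load("cifar_tsY.bin")
x_train.div_(255.0)
x_test.div_(255.0)

epochs = 10 if gpu else 1
eddl.fit(net, [x_train], [y_train], batch_size, epochs)
eddl.evaluate(net, [x_test], [y_test], bs=batch_size)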
Example no. 5
import sys

import pyeddl.eddl as eddl

# resnet_block() is the bottleneck helper defined in Example no. 1.

eddl.download_cifar10()
# argv[2] selects GPU execution (defaults to True when absent).
gpu = int(sys.argv[2]) == 1 if len(sys.argv) > 2 else True

epochs = 10 if gpu else 1
batch_size = 50
num_classes = 10

# argv[1] toggles batch normalization.
bn = int(sys.argv[1]) == 1

# ResNet-style CIFAR-10 classifier: 7x7 stem convolution and max pooling,
# four stages of bottleneck blocks, then global average pooling.
inp = eddl.Input([3, 32, 32])
l = inp
l = eddl.GlorotUniform(eddl.Conv(l, 64, [7, 7], [2, 2], "same", False))
l = eddl.MaxPool(l, [2, 2], [2, 2], "valid")
l = resnet_block(l, 64, bn, 2, False)
l = resnet_block(l, 128, bn, 2, True)
l = resnet_block(l, 256, bn, 2, True)
l = resnet_block(l, 512, bn, 2, True)
l = eddl.GlobalAveragePool(l)
l = eddl.Flatten(l)

out = eddl.Softmax(eddl.GlorotUniform(eddl.Dense(l, num_classes)))

net = eddl.Model([inp], [out])
eddl.plot(net, "model.pdf")

eddl.build(net, eddl.adam(0.0001), ["soft_cross_entropy"],
           ["categorical_accuracy"],