# Example 1
# 0
        norm     = tf.sqrt(tf.reduce_sum(tf.square(gradient),[1,2,3]))
        return tf.abs(tensor[:,c])/norm
    distances = tf.map_fn(doit,tf.range(tensor.shape.as_list()[1]),
                                        dtype=tf.float32)
    return tf.reduce_min(distances,0)




# Create Network
#---------------

# Build the classification network; ops are appended sequentially and each
# new op consumes the previous one via dnn[-1].
dnn = sknet.Network(name='simple_model')

if DATA_AUGMENTATION:
    # Train-time augmentation: random flip along the last axis, then a
    # random 28x28 crop with a fixed seed for reproducibility.
    dnn.append(ops.RandomAxisReverse(dataset.images,axis=[-1]))
    dnn.append(ops.RandomCrop(dnn[-1],(28,28),seed=10))
    start_op = 2  # index of the first op after the augmentation stage
else:
    # No augmentation: feed the raw images directly.
    dnn.append(dataset.images)
    start_op = 1
if MODEL=='cnn':
    # Predefined small convolutional architecture from sknet.
    sknet.networks.ConvSmall(dnn,dataset.n_classes)
elif MODEL=='dense':
    # Fully-connected stack: Dense -> BatchNorm -> Activation.
    # NOTE(review): the second positional arg to BatchNorm (0) is presumably
    # the normalization axis, and 0.1 is presumably a leaky-ReLU slope —
    # confirm both against sknet.ops.
    dnn.append(sknet.ops.Dense(dnn[-1],4096))
    dnn.append(sknet.ops.BatchNorm(dnn[-1],0))
    dnn.append(sknet.ops.Activation(dnn[-1],0.1))

    dnn.append(sknet.ops.Dense(dnn[-1],2048))
    dnn.append(sknet.ops.BatchNorm(dnn[-1],0))
    dnn.append(sknet.ops.Activation(dnn[-1],0.1))
# Example 2
# 0
# Keep only the last 100 samples of the training set, selected through `perm`
# (a permutation defined earlier in the file — TODO confirm it covers the
# whole training set). Images and labels are sliced identically to stay aligned.
dataset['images/train_set'] = dataset['images/train_set'][perm[-100:]]
dataset['labels/train_set'] = dataset['labels/train_set'][perm[-100:]]


# Mini-batches of 32: training samples are drawn in random order ("see all"),
# while test/valid sets are traversed sequentially.
iterator = BatchIterator(32, {'train_set': 'random_see_all',
                         'test_set': 'continuous', 'valid_set': 'continuous'})

# Input placeholders are pinned to the CPU.
dataset.create_placeholders(iterator, device="/cpu:0")

# Create Network
# --------------

dnn = sknet.Network(name='simple_model')
# Concatenate the two image batches along the batch axis so both pass through
# the same network. NOTE(review): `dnn.uimages` is presumably an unlabeled
# image batch — confirm against sknet / the placeholder creation above.
images = tf.concat([dnn.images, dnn.uimages], 0)
if DATA_AUGMENTATION:
    # Random flip along the last axis, then a seeded random 28x28 crop.
    dnn.append(ops.RandomAxisReverse(images, axis=[-1]))
    dnn.append(ops.RandomCrop(dnn[-1], (28, 28), seed=10))
else:
    # No augmentation: feed the concatenated images directly.
    dnn.append(images)

# Select one of the predefined sknet architectures by name.
if MODEL == 'cnn':
    sknet.networks.ConvLarge(dnn, dataset.n_classes)
elif MODEL == 'smallresnet':
    sknet.networks.Resnet(dnn, dataset.n_classes, D=2, W=1)
elif MODEL == 'largeresnet':
    sknet.networks.Resnet(dnn, dataset.n_classes, D=4, W=2)

# Network output is the last appended op.
prediction = dnn[-1]

# Gradient of the prediction w.r.t. the input batch (tf.gradients implicitly
# sums a non-scalar target and returns a list, hence the [0]).
reconstruction = tf.gradients(prediction,images)[0]
# Mean-squared error between that input-gradient and the images themselves.
loss_recons = sknet.losses.MSE(reconstruction,images)