# Example #1
import random  # hoisted: imports belong before the statements that use them

# Dummy input image (NHWC: 1 x 128 x 128 x 3) and its scalar label.
a = var.Variable((1, 128, 128, 3), 'a')
# NOTE(review): declared shape is [1, 1] but the data assigned below is a
# 1-element 1-D array — presumably the framework reshapes/broadcasts; confirm.
label = var.Variable([1, 1], 'label')
# One random class id; dtype=int replaces the original separate .astype(int) step.
# NOTE(review): randint(1, 9) never produces class 0 — verify that is intended.
label.data = np.array([random.randint(1, 9)], dtype=int)

# Forward graph: conv -> relu -> maxpool -> fully-connected -> softmax loss.
# Each op registers itself under its name; we keep only the output Variables.
_conv1 = op.Conv2D((3, 3, 3, 3), input_variable=a, name='conv1', padding='VALID')
conv1_out = _conv1.output_variables

_relu1 = op.Relu(input_variable=conv1_out, name='relu1')
relu1_out = _relu1.output_variables

_pool1 = op.MaxPooling(ksize=2, input_variable=relu1_out, name='pool1')
pool1_out = _pool1.output_variables

_fc1 = op.FullyConnect(output_num=10, input_variable=pool1_out, name='fc1')
fc1_out = _fc1.output_variables

_sf = op.SoftmaxLoss(predict=fc1_out, label=label, name='sf')
sf_out = _sf.loss

# Fetch the trained layers back out of the operator scope by name.
new_conv1 = op.GLOBAL_VARIABLE_SCOPE['conv1']
new_fc1 = op.GLOBAL_VARIABLE_SCOPE['fc1']

# Rebuild the same topology with the standalone layer classes.
# NOTE(review): pool1 is sized from conv1.output_shape rather than relu1's —
# presumably Relu is shape-preserving, so the two are equal; confirm.
conv1 = Conv2D([1, 128, 128, 3], 3, 3, 1, method='VALID')
relu1 = Relu(conv1.output_shape)
pool1 = MaxPooling(conv1.output_shape)
fc1 = FullyConnect(pool1.output_shape, 10)
sf = Softmax(fc1.output_shape)

# Copy the trained parameters layer-by-layer into the rebuilt network
# (conv weights, conv bias, fc weights, fc bias — same order as before).
for trained, fresh in ((new_conv1, conv1), (new_fc1, fc1)):
    fresh.weights = trained.weights.data
    fresh.bias = trained.bias.data

batch_size = 64
global_step = 0

# set method
# Switch every trainable Variable in the global scope to the Adam optimizer.
# Iterating .values() directly avoids the original key-loop's second dict lookup.
for s in var.GLOBAL_VARIABLE_SCOPE.values():
    if isinstance(s, var.Variable) and s.learnable:
        s.set_method_adam()

# Placeholders for one training batch (NHWC: batch_size x 28 x 28 x 1, MNIST-sized).
img_placeholder = var.Variable((batch_size, 28, 28, 1), 'input')
# NOTE(review): this Variable is also named 'label', same as the one created
# earlier — if the framework keys its scope on the name, this may collide; verify.
label_placeholder = var.Variable([batch_size, 1], 'label')

# set train_op
# Build the training graph; `inference` is defined elsewhere in this file.
prediction = inference(img_placeholder, 10)
# NOTE(review): rebinds `sf` (previously the standalone Softmax instance above).
sf = op.SoftmaxLoss(prediction, label_placeholder, 'sf')


# Load the MNIST training and test splits from the local data directory.
images, labels = load_mnist('./data/mnist')
test_images, test_labels = load_mnist('./data/mnist', 't10k')

# save train curve config
# Per-iteration history, presumably used to plot training curves later.
loss_collect = []
acc_collect = []
print ('new')
with open('logs/%s_log.txt'%VERSION, 'wb') as logf:
    for epoch in range(20):
        # random shuffle
        order = np.arange(images.shape[0])
        np.random.shuffle(order)
        _images = images[order]