Exemplo n.º 1
0
def make_network(minibatch_size = 128):
	"""Build an autoencoder-style decoder on top of a pre-trained network.

	Loads "rand.data", freezes its parameters, and stacks `num_blocks`
	groups of deconv layers on its last output, upsampling (stride-2
	deconv) between groups.  The loss is the summed squared error between
	a 32x32 center crop of the reconstruction and the network's
	DataProvider input.

	:param minibatch_size: unused here; kept for interface compatibility
	    with the sibling make_network definitions.
	:return: a Network whose outputs are [reconstruction, inp] plus the
	    per-block intermediate activations, with `loss_var` attached.
	"""
	pre_net = load_network("rand.data")

	inp = pre_net.outputs[-1]
	visitor = NetworkVisitor(inp).all_oprs
	for i in visitor:
		#if isinstance(i, BN):
		#	i.set_freezed()
		# Freeze the pre-trained weights; remember the raw data input so
		# the reconstruction loss can compare against it.
		if isinstance(i, ParamProvider):
			i.set_freezed()
		if isinstance(i, DataProvider):
			dp = i
	# Stop gradients from flowing back into the pre-trained network.
	lay = O.ZeroGrad(inp)
	chl = inp.partial_shape[1]

	p = []
	num_blocks = 1
	for tt in range(num_blocks):
		for j in range(2):
			lay = deconv_bn_relu("encoder_deconv_{}{}".format(tt, j), lay, kernel_shape = 3, stride = 1, padding = 0, output_nr_channel = chl)
		#lay = deconv_bn_relu("encoder_deconv_{}1".format(tt), lay, kernel_shape = 3, stride = 1, padding = 1, output_nr_channel = chl)
		p.append(lay)
		# BUGFIX: was `tt != num_blocks`, which is always true for
		# tt in range(num_blocks); the matching implementation below
		# (`tt != 2` with range(3)) shows the stride-2 upsample must be
		# skipped on the LAST block only.
		if tt != num_blocks - 1:
			# "{}2" names this the third deconv of block tt (the
			# original relied on the leaked loop variable `j`).
			lay = deconv_bn_relu("encoder_deconv_{}2".format(tt), lay, kernel_shape = 2, stride = 2, padding = 0, output_nr_channel = chl // 2)
		chl = chl // 2
	lay = deconv_bn_relu("outputs", lay, kernel_shape = 3, stride = 1, padding = 1, output_nr_channel = 3, isbnrelu = False)
	# Take a 32x32 center crop (assumes square spatial dims — TODO confirm).
	mid = lay.partial_shape[2] // 2
	lay = lay[:, :, mid-16:mid+16, mid-16:mid+16]
	print(lay.partial_shape)
	# Sum squared error over C/H/W, averaged over the minibatch.
	loss = ((lay - dp)**2).sum(axis = 3).sum(axis = 2).sum(axis = 1).mean()
	network = Network(outputs = [lay, inp] + p)
	network.loss_var = loss
	return network
Exemplo n.º 2
0
def make_network(minibatch_size = 128):
	"""Build a frozen-backbone decoder network on top of "trained.data".

	Every BN and parameter operator of the pre-trained net is frozen;
	three deconv stages reconstruct a 3-channel image from the backbone's
	last output, and the loss is the mean (over the batch) of the summed
	squared reconstruction error against the original DataProvider input.

	:param minibatch_size: unused; kept for signature compatibility.
	:return: Network with outputs [reconstruction, inp] + stage outputs
	    and `loss_var` set to the reconstruction loss.
	"""
	pre_net = load_network("trained.data")

	inp = pre_net.outputs[-1]
	for opr in NetworkVisitor(inp).all_oprs:
		# Freeze the whole pre-trained graph and locate its data input.
		if isinstance(opr, BN):
			opr.set_freezed()
		if isinstance(opr, ParamProvider):
			opr.set_freezed()
		if isinstance(opr, DataProvider):
			dp = opr
	# Block gradient flow into the frozen backbone.
	lay = O.ZeroGrad(inp)
	chl = inp.partial_shape[1]

	p = []
	for stage in range(3):
		# Two 3x3 same-padding deconvs per stage at constant width.
		lay = deconv_bn_relu("encoder_deconv_{}0".format(stage), lay, kernel_shape = 3, stride = 1, padding = 1, output_nr_channel = chl)
		lay = deconv_bn_relu("encoder_deconv_{}1".format(stage), lay, kernel_shape = 3, stride = 1, padding = 1, output_nr_channel = chl)
		p.append(lay)
		# Stride-2 upsampling deconv between stages (not after the last).
		if stage != 2:
			lay = deconv_bn_relu("encoder_deconv_{}2".format(stage), lay, kernel_shape = 2, stride = 2, padding = 0, output_nr_channel = chl // 2)
		chl //= 2
	lay = deconv_bn_relu("outputs", lay, kernel_shape = 3, stride = 1, padding = 1, output_nr_channel = 3, isbnrelu = False)
	# Squared error summed over W, H, C; mean over the minibatch axis.
	sq_err = (lay - dp) ** 2
	loss = sq_err.sum(axis = 3).sum(axis = 2).sum(axis = 1).mean()
	network = Network(outputs = [lay, inp] + p)
	network.loss_var = loss
	return network
Exemplo n.º 3
0
from megskull.network import Network
from My import MyWeightDecay
from megskull.opr.helper.elemwise_trans import ReLU, Identity

minibatch_size = 10
patch_size = 32
net_name = "test_wc"

# Minimal graph: one fully-connected layer with constant-initialized
# weights and an identity nonlinearity, summed into a scalar loss.
inp = O.DataProvider("a", shape=(minibatch_size, 3))
out = O.FullyConnected("fc",
                       inp,
                       output_dim=3,
                       W=C(1),
                       nonlinearity=Identity())
W = out.inputs[1]
# ZeroGrad keeps the loss value but blocks gradient propagation.
loss = O.ZeroGrad(out.sum())
network = Network(outputs=[loss])
network.loss_var = loss
# NOTE(review): the block below is deliberately disabled via a
# triple-quoted string; the original never closed the string, which made
# this snippet a SyntaxError on its own.  The closing quotes are added.
"""
func = OptimizableFunc.make_from_loss_var(loss)
NaiveSGD(1)(func)
func.compile(loss)

print(func())
print(np.array(a.eval(), dtype = np.float32))

loss.Mul_Wc(10)

print(func())
print(np.array(a.eval()))
"""
Exemplo n.º 4
0
import megskull.opr.all as O
import numpy as np

# Two parameter vectors; their elementwise product summed gives a scalar
# loss.  ZeroGrad blocks gradient flow through the raw sum itself.
a = O.ParamProvider('a', np.ones((10, )))
b = O.ParamProvider('b', np.ones((10, )))
loss = O.ZeroGrad((a * b).sum())

from My import MyWeightDecay

# Wrap the loss with weight decay; "*" presumably matches all parameters
# with coefficient 0.001 — TODO confirm against MyWeightDecay.
loss = MyWeightDecay(loss, {"*": 0.001})

from megskull.optimizer import NaiveSGD, OptimizableFunc

# Compile an SGD (lr=1) update function for the decayed loss.
func = OptimizableFunc.make_from_loss_var(loss)
NaiveSGD(1)(func)
func.compile(loss)

# One optimization step, then inspect parameter 'a'.
print(func())
print(np.array(a.eval(), dtype=np.float32))

# Scale the weight-decay coefficient by 10 and step again to observe the
# stronger shrinkage on 'a'.
loss.Mul_Wc(10)

print(func())
print(np.array(a.eval()))

print(func())