Example #1
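A small wrapper class around a saved model: it deserializes the network, compiles a test function returning both the prediction and the loss, and loads CIFAR mean/std statistics for input normalization. (The imports and the `NN` class header below are reconstructed from how this snippet is used in Examples #4, #5 and #9.)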
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np
import pickle


class NN:
    def __init__(self, name):
        with open(name, "rb") as f:
            self.net = load_network(f)
        # compile a function returning both the prediction and the loss
        self.test_func = Function().compile([self.net.outputs[0], self.net.loss_var])
        with open("/unsullied/sharefs/liuyanyi02/lyy/CIFAR/meanstd.data", "rb") as f:
            self.mean, self.std = pickle.load(f)
            self.mean = np.array(self.mean).reshape(3, 32, 32)
            self.std = np.array(self.std).reshape(3, 32, 32)
Example #2
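Post-training batch-normalization calibration: measure each BN layer's mean and variance over a dataset, write them into the layer state, freeze the layers, and save a checkpoint.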
def bn_post_process(model_file: str, save_model_file: str, data):
    with TrainingEnv(name=model_file + "bn_post_proc", part_count=2) as env:
        with open(model_file, "rb") as f:
            net = load_network(f)
        #loss_func = env.make_func_from_loss_var(net.loss_var, "val", train_state = False)

        bn_oprs = [
            opr for opr in net.loss_visitor.all_oprs
            if isinstance(opr, BatchNormalization)
        ]
        bn_inputs = [opr.inputs[0] for opr in bn_oprs]

        mean_Esqr_nodes = []
        for i in bn_inputs:
            if i.partial_shape.ndim == 2:
                mean = i.mean(axis=0).reshape((1, -1))
                mean.vflags.data_parallel_reduce_method = 'sum'
                Esqr = (i**2).mean(axis=0).reshape((1, -1))
                Esqr.vflags.data_parallel_reduce_method = 'sum'
            elif i.partial_shape.ndim == 4:
                # reduce over N, H, W, keeping the channel dimension
                mean = i.mean(axis=3).mean(axis=2).mean(axis=0).reshape(
                    (1, -1))
                mean.vflags.data_parallel_reduce_method = 'sum'
                Esqr = (i**2).mean(axis=3).mean(axis=2).mean(axis=0).reshape(
                    (1, -1))
                Esqr.vflags.data_parallel_reduce_method = 'sum'
            mean_Esqr_nodes.append(mean)
            mean_Esqr_nodes.append(Esqr)

        func = Function().compile(mean_Esqr_nodes)

        for i, opr in enumerate(bn_oprs):
            layer_mean, layer_var = _get_dataset_mean_var(data, func, i)
            if layer_mean.ndim == 0:
                layer_mean = layer_mean.reshape((1, ))
            if layer_var.ndim == 0:
                layer_var = layer_var.reshape((1, ))

            state = opr.State(channels=layer_mean.shape[0],
                              val=[layer_mean, layer_var, 1])
            state.owner_opr_type = type(opr)

            opr.set_opr_state(state)

            opr.freezed = True

        env.register_checkpoint_component("network", net)
        env.save_checkpoint(save_model_file)
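The helper `_get_dataset_mean_var` is not shown in this example. A minimal sketch of what it plausibly does, assuming `data` is an iterable of minibatches, the network input is named `data`, and `func` returns the compiled (mean, E[x²]) pairs in layer order (all assumptions, not confirmed by the source):

def _get_dataset_mean_var(data, func, layer_idx):
    # hypothetical helper: average the per-layer statistics over all minibatches
    mean_sum, esqr_sum, n_batches = 0.0, 0.0, 0
    for batch in data:
        outputs = func(data=batch)
        mean_sum = mean_sum + outputs[2 * layer_idx].reshape(-1)
        esqr_sum = esqr_sum + outputs[2 * layer_idx + 1].reshape(-1)
        n_batches += 1
    mean = mean_sum / n_batches
    var = esqr_sum / n_batches - mean ** 2  # Var[x] = E[x^2] - E[x]^2
    return mean, var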
Example #3
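Evaluate a saved ResNet-110 on the CIFAR-10 test batch: normalize the inputs, run the compiled forward function, and print top-1 accuracy.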
def test(net=None):
    if net is None:
        net = load_network(open("./data/resnet110.data", "rb"))
    test_func = Function().compile(net.outputs[0])
    """
	from megskull.network import NetworkVisitor
	visitor = NetworkVisitor(net.loss_var)
	for i in visitor.all_oprs:
		print(i)
		print("input = ", i.inputs)
		print("output = ", i.outputs)
		a = np.array(i.outputs[0].get_value())
		print(a)
		input()
	a = np.array(visitor.all_oprs_dict["conv1:W"].get_value())
	print(a)
	print("mean = ", np.mean(a, axis = 0))
	print("std = ", np.std(a, axis = 0))
	exit()
	"""
    dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
    data = dic[b'data']
    label = dic[b'labels']

    data = data.astype(np.float32)
    import pickle
    with open("meanstd.data", "rb") as f:
        mean, std = pickle.load(f)
    data = (data - mean) / std
    data = np.resize(data, (10000, 3, 32, 32))
    data = data.astype(np.float32)
    """
	import cv2
	for i in range(10):
		img = data[i].transpose(1, 2, 0)
		img = img[:,::-1,:]
		cv2.imshow('x', img)
		cv2.waitKey(0)
	"""
    #data = data.astype(np.float32)
    #data = (data - 128) / 256

    pred = test_func(data=data)
    print(np.array(pred).shape)
    pred = np.argmax(np.array(pred), axis=1)
    acc = (np.array(pred) == np.array(label)).mean()
    print(acc)
Example #4
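Inspect the fc1 activations of a loaded network on one normalized batch of 128 CIFAR images.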
def myw_test():
    d40_MY = NN("./data/r20_MY.data")
    net = d40_MY.net
    outputs = []
    visitor = NetworkVisitor(net.loss_var)
    for i in visitor.all_oprs:
        if "fc1" in i.name and ":W" not in i.name and ":b" not in i.name:
            outputs.append(i)
    func = Function().compile(outputs)
    data, labels = load_CIFAR_data()
    batch = data[:128]
    batch = batch.reshape(128, 3, 32, 32)
    mean, std = d40_MY.mean, d40_MY.std
    batch = (batch - mean) / std
    outputs_weights = func(data=batch)
    for i in outputs_weights:
        print(i.shape)
        w = i[0]
        w = w.reshape(-1, 4, 4)
        print(w)
        input()
Example #5
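Compile one function over the layer gradients and another over the layer outputs, evaluate both on a normalized CIFAR batch, print their statistics, and dump the per-layer gradient standard deviations.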
def test():
    data, labels = load_CIFAR_data()
    p120 = NN("data/p120.data")
    net = p120.net
    loss = net.loss_var
    visitor = NetworkVisitor(loss)
    inp = []
    for i in visitor.all_oprs:
        if "data" in i.name:
            inp.append(i)
        if "conv" in i.name and ":" not in i.name:
            inp.append(i)
    print(inp)
    grad = []
    out = []
    for i in inp:
        grad.append(O.Grad(loss, i))
        out.append(i)
    F = Function()
    F._env.flags.train_batch_normalization = True
    func = F.compile(grad)
    F = Function()
    F._env.flags.train_batch_normalization = True
    func1 = F.compile(out)

    batch = data[:128]
    batch = batch.reshape(128, 3, 32, 32)
    mean, std = p120.mean, p120.std
    batch = (batch - mean) / std
    label = labels[:128]
    grad_out = func(data=batch, label=label)
    lay_out = func1(data=batch, label=label)
    idx = 0
    grad_list = []
    for i, j in zip(grad_out, lay_out):
        print(i.shape, idx)
        idx += 1
        f = i.flatten()
        print("grad")
        print(f)
        print(np.mean(f), np.std(f))
        grad_list.append(np.std(f))
        print("val")
        h = j.flatten()
        print(h)
        print(np.mean(h), np.std(h))
    pickle.dump(grad_list, open("p120_norelu_grad.data", "wb"))
    """
Example #6
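Data-dependent initialization: replace each BN-affine scale/bias pair with new parameters derived from the activation statistics of a single batch.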
def init(net, batch):
    visitor = NetworkVisitor(net.loss_var)
    lisk = []
    lisb = []
    for i in visitor.all_oprs:
        if ":k" in i.name and "bnaff" in i.name:
            lisk.append(i)
        if ":b" in i.name and "bnaff" in i.name:
            lisb.append(i)
    for i, (k, b) in enumerate(zip(lisk, lisb)):
        # recompile each iteration: replace_vars below mutates the graph
        func = Function().compile(net.outputs)
        outputs = func(data=batch['data'])
        t = outputs[1 + i]
        mean = t.mean(axis=3).mean(axis=2).mean(axis=0)
        std = ((t - mean[np.newaxis, :, np.newaxis, np.newaxis])**2).mean(
            axis=3).mean(axis=2).mean(axis=0)**0.5
        nk = O.ParamProvider("new" + k.name, 1.0 / std)
        nb = O.ParamProvider("new" + b.name, -mean / std)
        visitor.replace_vars([(k, nk), (b, nb)], copy=False)

    visitor = NetworkVisitor(net.loss_var)
    for i in visitor.all_oprs:
        print(i)
    return net
Example #7
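Training-setup fragment (indented as in the original, where it runs inside a `with TrainingEnv(...) as env:` block like Example #2): build a Momentum-optimized training function, register checkpoint components, and open CIFAR-10 train/valid input pipes.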
    train_func = env.make_func_from_loss_var(net.loss_var, "train", train_state=True)

    lr = 0.1 * num_GPU
    optimizer = Momentum(lr, 0.9)
    optimizer(train_func)

    #train_func.comp_graph.share_device_memory_with(valid_func.comp_graph)

    dic = {
        "loss": net.loss_var,
        "pre_loss": preloss,
        "outputs": net.outputs[0]
    }
    train_func.compile(dic)
    valid_func = Function().compile(net.outputs[0])

    env.register_checkpoint_component("network", net)
    env.register_checkpoint_component("opt_state", train_func.optimizer_state)

    tr_p = InputPipe("lyy.CIFAR10.train", buffer_size=1000)
    va_p = InputPipe("lyy.CIFAR10.valid", buffer_size=1000)
    epoch = 0
    EPOCH_NUM = 50000 // minibatch_size
    i = 0
    max_acc = 0
    ORI_IT = 64000
    BN_IT = 10000
    TOT_IT = ORI_IT + BN_IT

    C = CIFAR_test()
Example #8
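Pull the Astype operators and the conv2offsetx operator out of a loaded graph and compile inspection functions for them alongside the network output.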
from megskull.network import NetworkVisitor
visitor = NetworkVisitor(net.loss_var)
offsets = []
locs = []
for i in visitor.all_oprs:
    print(i, i.name)
    if "Astype" in i.name:
        locs.append(i)
    if i.name == "conv2offsetx":
        offsets.append(i)
        print("A")
print(len(locs))
locs = locs[::4]
outs = [net.outputs[0]] + locs
test_func = Function().compile(outs)
outs1 = offsets
offs_func = Function().compile(outs1)


def load_data(name):
    import pickle
    with open(name, "rb") as fo:
        dic = pickle.load(fo, encoding="bytes")
    return dic


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']
Example #9
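Load a saved plain30_orth network and the CIFAR-10 test batch, normalize it, and draw a random sample of 3000 images.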
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np
from numpy.linalg import svd

net = load_network(open("./data/plain30_orth.data", "rb"))
func = Function().compile(net.outputs)


def load_data(name):
    import pickle
    with open(name, "rb") as fo:
        dic = pickle.load(fo, encoding="bytes")
    return dic


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']
data = data.astype(np.float32)
import pickle
with open("meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)

# draw a random sample of 3000 test images (with replacement)
idx = np.random.randint(data.shape[0], size=3000)
data = [data[i] for i in idx]

Example #10
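Run a vehicle-pose classification network over a test dataset on CPU, accumulating per-class label and prediction counts.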
#!/usr/bin/env mdl
from megbrain.config import set_default_device
from megskull.graph import Function
from neupeak.utils.cli import load_network

import dataset
import cv2
import numpy as np

set_default_device('cpu0')
net = load_network(
    '/home/zhaojing/vehicle_pose/config/xception145/train_log/models/latest')
classify = Function().compile(net.outputs[0])

test_dataset = dataset.get('test')
x = test_dataset.get_epoch_minibatch_iter()

correct = [0, 0]
total_label = [0, 0]
total_pred = [0, 0]
for data in x:
    out = classify(data.data)

    #total += data.label.size
    for i in range(data.label.size):
        total_pred[out[i].argmax()] += 1
        total_label[data.label[i]] += 1
        if out[i].argmax() == data.label[i]:
            correct[data.label[i]] += 1

accuracy = [0, 0]
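The example stops right after allocating `accuracy`; a plausible completion of the per-class accuracy computation (an assumption, not part of the original):

for i in range(2):
    if total_label[i] > 0:
        accuracy[i] = correct[i] / total_label[i]
print("per-class accuracy:", accuracy)
print("overall accuracy:", sum(correct) / sum(total_label))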
Example #11
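Skeleton of an MNIST training loop: wrap the loss in an optimizable function with Momentum, compile a separate prediction function, and iterate over minibatches.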
from megskull.graph import Function
from megskull.optimizer.momentum import Momentum
import network
from megskull.graph import FpropEnv
from meghair.utils.io import dump

# NOTE: OptimizableFunc is used below, but its import is missing from the
# original snippet.
func = OptimizableFunc.make_from_loss_var(network.loss)

Momentum(learning_rate=1e-2, momentum=0.9)(func)

func.compile(network.loss)

#env = FpropEnv()
#pred_mgb = env.get_mgbvar(network.pred)
#func_test = env.comp_graph.compile_outonly(pred_mgb)
func_test = Function().compile(network.pred)

import pickle
import gzip
import numpy as np

train_set, valid_set, test_set = pickle.load(gzip.open("mnist.pkl.gz", "rb"),
                                             encoding="latin1")

minibatch_size = network.minibatch_size

l = len(train_set[0])
epoch = 0
for i in range(100000):
    j = i % (l // minibatch_size)
    minibatch = train_set[0][j * minibatch_size:(j + 1) * minibatch_size]
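    # --- the snippet is truncated here; a plausible continuation of the
    # training step (the `data`/`label` input names are assumptions, not
    # confirmed by the source) ---
    labels = train_set[1][j * minibatch_size:(j + 1) * minibatch_size]
    loss = func(data=minibatch, label=labels)
    if j == l // minibatch_size - 1:
        epoch += 1
        print("epoch", epoch, "loss", loss)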