Example #1 — file: network.py, project: lyuyanyii/CIFAR
def make_network(minibatch_size = 128):
	"""Build an image-reconstruction decoder on top of the frozen
	pre-trained network stored in "trained.data".

	The pre-net's BN layers and parameter providers are frozen; three
	deconv stages (channel count halved per stage) are stacked on its
	last output, followed by a 3-channel image layer.  The loss is the
	per-image summed squared error against the original input images.
	"""
	pre_net = load_network("trained.data")

	inp = pre_net.outputs[-1]
	# Freeze the pre-trained graph; remember its data provider (the
	# original images, used below as the reconstruction target).
	for opr in NetworkVisitor(inp).all_oprs:
		if isinstance(opr, BN):
			opr.set_freezed()
		if isinstance(opr, ParamProvider):
			opr.set_freezed()
		if isinstance(opr, DataProvider):
			dp = opr

	# Block gradients from flowing back into the frozen feature extractor.
	lay = O.ZeroGrad(inp)
	chl = inp.partial_shape[1]

	stage_outputs = []
	for stage in range(3):
		# Two same-size 3x3 deconv layers per stage.
		for sub in range(2):
			lay = deconv_bn_relu("encoder_deconv_{}{}".format(stage, sub), lay, kernel_shape = 3, stride = 1, padding = 1, output_nr_channel = chl)
		stage_outputs.append(lay)
		# Stride-2 deconv between stages halves the channel count;
		# it is skipped after the final stage.
		if stage != 2:
			lay = deconv_bn_relu("encoder_deconv_{}2".format(stage), lay, kernel_shape = 2, stride = 2, padding = 0, output_nr_channel = chl // 2)
		chl //= 2

	lay = deconv_bn_relu("outputs", lay, kernel_shape = 3, stride = 1, padding = 1, output_nr_channel = 3, isbnrelu = False)
	# Squared error summed over C, H, W; averaged over the minibatch.
	loss = ((lay - dp) ** 2).sum(axis = 3).sum(axis = 2).sum(axis = 1).mean()
	network = Network(outputs = [lay, inp] + stage_outputs)
	network.loss_var = loss
	return network
Example #2 — file: network.py, project: lyuyanyii/CIFAR
def make_network(minibatch_size = 128):
	"""Build a decoder network on top of a frozen randomly-initialised
	net loaded from "rand.data", reconstructing the 32x32 input images.

	Only ParamProvider oprs are frozen; BN oprs are deliberately left
	trainable here (the freeze is commented out below), unlike the
	"trained.data" variant of this builder.
	"""
	pre_net = load_network("rand.data")

	# Decode starting from the pre-net's last output feature map.
	inp = pre_net.outputs[-1]
	visitor = NetworkVisitor(inp).all_oprs
	for i in visitor:
		#if isinstance(i, BN):
		#	i.set_freezed()
		if isinstance(i, ParamProvider):
			i.set_freezed()
		if isinstance(i, DataProvider):
			dp = i  # original input images; reconstruction target below
	# Stop gradients from flowing back into the frozen feature extractor.
	lay = O.ZeroGrad(inp)
	chl = inp.partial_shape[1]

	p = []  # per-block intermediate outputs, exposed as extra network outputs
	num_blocks = 1
	for tt in range(num_blocks):
		# Two 3x3 stride-1 deconv layers per block (padding 0).
		for j in range(2):
			lay = deconv_bn_relu("encoder_deconv_{}{}".format(tt, j), lay, kernel_shape = 3, stride = 1, padding = 0, output_nr_channel = chl)
		#lay = deconv_bn_relu("encoder_deconv_{}1".format(tt), lay, kernel_shape = 3, stride = 1, padding = 1, output_nr_channel = chl)
		p.append(lay)
		# NOTE(review): tt ranges over range(num_blocks), so this condition
		# is vacuously true and the stride-2 layer runs after every block,
		# including the last.  The "trained.data" variant skips it on the
		# final block (tt != 2 over range(3)) — confirm whether
		# `num_blocks - 1` was intended here before changing anything.
		# Also note `j` is reused after the inner loop, so j + 1 == 2.
		if tt != num_blocks:
			lay = deconv_bn_relu("encoder_deconv_{}{}".format(tt, j + 1), lay, kernel_shape = 2, stride = 2, padding = 0, output_nr_channel = chl // 2)
		chl = chl // 2
	# Final 3-channel image layer without BN/ReLU.
	lay = deconv_bn_relu("outputs", lay, kernel_shape = 3, stride = 1, padding = 1, output_nr_channel = 3, isbnrelu = False)
	# Centre-crop the reconstruction back down to the 32x32 input size.
	mid = lay.partial_shape[2] // 2
	lay = lay[:, :, mid-16:mid+16, mid-16:mid+16]
	print(lay.partial_shape)
	# Squared error summed over C, H, W; averaged over the minibatch.
	loss = ((lay - dp)**2).sum(axis = 3).sum(axis = 2).sum(axis = 1).mean()
	network = Network(outputs = [lay, inp] + p)
	network.loss_var = loss
	return network
Example #3 — file: test.py, project: lyuyanyii/CIFAR
	def __init__(self, name):
		"""Load the trained network stored in *name* and precompile a test
		function returning (first output, loss).

		Also loads the dataset per-pixel mean/std and reshapes both to
		3x32x32 for input normalisation.
		"""
		with open("{}".format(name), "rb") as model_f:
			self.net = load_network(model_f)
		self.test_func = Function().compile([self.net.outputs[0], self.net.loss_var])
		with open("/unsullied/sharefs/liuyanyi02/lyy/CIFAR/meanstd.data", "rb") as stats_f:
			raw_mean, raw_std = pickle.load(stats_f)
		self.mean = np.array(raw_mean).reshape(3, 32, 32)
		self.std = np.array(raw_std).reshape(3, 32, 32)
Example #4
    def __init__(self, model_file, env):
        """Load two copies of the model from *model_file* (a reference copy
        and one to modify) and set up train/validation data iterators.
        """
        self.env = env
        self.net = load_network(model_file)
        # Sampling / batching configuration.
        self.N = 16000
        self.batch_size = 64
        self.nperimage = 20
        # Second, independent copy of the same weights.
        self.new_net = load_network(model_file)
        print("finished loading weights")
        self.oprs_dict = self.net.loss_visitor.all_oprs_dict
        self.new_oprs_dict = self.new_net.loss_visitor.all_oprs_dict

        self.convs = self.get_convs_from_net()
        print(self.convs)
        # get_data returns a generator factory; call it to obtain the iterator.
        make_train_iter = self.get_data(is_val=False)
        make_val_iter = self.get_data(is_val=True)
        self.data_iter = make_train_iter()
        self.data_iter_val = make_val_iter()
Example #5
def bn_post_process(model_file: str, save_model_file: str, data):
    """Recompute every BatchNormalization layer's statistics from *data*
    and save the updated network as a checkpoint.

    For each BN input, graph nodes for the per-channel mean and E[x^2]
    are built (flagged for 'sum' reduction under data parallelism) and
    compiled into a single function; each BN opr is then given a fresh
    State holding the dataset-wide mean/variance and frozen.

    :param model_file: path of the pickled network to load.
    :param save_model_file: checkpoint name to save the result under.
    :param data: dataset consumed by _get_dataset_mean_var.
    """
    with TrainingEnv(name=model_file + "bn_post_proc", part_count=2) as env:
        net = load_network(open(model_file, "rb"))
        #loss_func = env.make_func_from_loss_var(net.loss_var, "val", train_state = False)

        # All BN oprs in the loss graph, plus the variable feeding each one.
        bn_oprs = [
            opr for opr in net.loss_visitor.all_oprs
            if isinstance(opr, BatchNormalization)
        ]
        bn_inputs = [opr.inputs[0] for opr in bn_oprs]

        # Two nodes per BN input: mean and E[x^2].  The variance is derived
        # later by _get_dataset_mean_var — presumably as E[x^2] - mean^2;
        # TODO confirm against its definition elsewhere in this file.
        mean_Esqr_nodes = []
        for i in bn_inputs:
            # Fully-connected input (N, C): reduce over the batch axis.
            if i.partial_shape.ndim == 2:
                mean = i.mean(axis=0).reshape((1, -1))
                mean.vflags.data_parallel_reduce_method = 'sum'
                Esqr = (i**2).mean(axis=0).reshape((1, -1))
                Esqr.vflags.data_parallel_reduce_method = 'sum'
            # Convolutional input (N, C, H, W): reduce W, H, then N,
            # keeping one statistic per channel.
            if i.partial_shape.ndim == 4:
                mean = i.mean(axis=3).mean(axis=2).mean(axis=0).reshape(
                    (1, -1))
                mean.vflags.data_parallel_reduce_method = 'sum'
                Esqr = (i**2).mean(axis=3).mean(axis=2).mean(axis=0).reshape(
                    (1, -1))
                Esqr.vflags.data_parallel_reduce_method = 'sum'
            # NOTE(review): if ndim is neither 2 nor 4, `mean`/`Esqr` silently
            # keep the previous iteration's nodes (or are unbound on the
            # first pass) — verify all BN inputs are rank 2 or 4.
            mean_Esqr_nodes.append(mean)
            mean_Esqr_nodes.append(Esqr)

        func = Function().compile(mean_Esqr_nodes)

        for i in range(len(bn_oprs)):
            opr = bn_oprs[i]
            layer_mean, layer_var = _get_dataset_mean_var(data, func, i)
            # Scalar statistics may come back 0-d; State expects 1-d vectors.
            if layer_mean.ndim == 0:
                layer_mean = layer_mean.reshape((1, ))
            if layer_var.ndim == 0:
                layer_var = layer_var.reshape((1, ))

            # val = [mean, variance, counter]; counter 1 presumably marks
            # the statistics as initialised — TODO confirm State semantics.
            state = opr.State(channels=layer_mean.shape[0],
                              val=[layer_mean, layer_var, 1])
            state.owner_opr_type = type(opr)

            opr.set_opr_state(state)

            # Freeze so further training cannot overwrite the new statistics.
            opr.freezed = True

        env.register_checkpoint_component("network", net)
        env.save_checkpoint(save_model_file)
Example #6
def test(net=None):
    """Evaluate a CIFAR-10 classifier on the test batch and print accuracy.

    Parameters
    ----------
    net : network object, optional
        When None, the trained model is loaded from "./data/resnet110.data".
    """
    if net is None:  # PEP 8: compare to None with `is`, not `==`
        # Use a context manager so the model file handle is not leaked.
        with open("./data/resnet110.data", "rb") as model_f:
            net = load_network(model_f)
    test_func = Function().compile(net.outputs[0])

    dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
    data = dic[b'data']
    label = dic[b'labels']

    data = data.astype(np.float32)
    import pickle
    with open("meanstd.data", "rb") as f:
        mean, std = pickle.load(f)
    # Normalise with the training-set statistics, then restore the
    # NCHW layout (10000 test images of 3x32x32).
    data = (data - mean) / std
    data = np.resize(data, (10000, 3, 32, 32))
    data = data.astype(np.float32)

    pred = test_func(data=data)
    print(np.array(pred).shape)
    # Predicted class = argmax over the class-score axis.
    pred = np.argmax(np.array(pred), axis=1)
    acc = (np.array(pred) == np.array(label)).mean()
    print(acc)
Example #7
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np

net = load_network(open("./data/resnet20.data_bned", "rb"))
test_func = Function().compile(net.outputs[0])


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
"""
import cv2
for i in range(10):
	img = data[i].transpose(1, 2, 0)
Example #8 — file: pca_ener.py, project: lyuyanyii/CIFAR
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np
from numpy.linalg import svd

net = load_network(open("./data/plain30_dp_sup.data", "rb"))
func = Function().compile(net.outputs)


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']
data = data.astype(np.float32)
import pickle
with open("meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)

idx = np.random.randint(data.shape[0], size=3000)
data = [data[i] for i in idx]

Example #9
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np
from numpy.linalg import svd

net = load_network(open("./data/plain30_orth.data", "rb"))
func = Function().compile(net.outputs)


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']
data = data.astype(np.float32)
import pickle
with open("meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)

idx = np.random.randint(data.shape[0], size=3000)
data = [data[i] for i in idx]

Example #10 — file: test_new.py, project: lyuyanyii/CIFAR
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np

net = load_network(open("./data/wc0.data", "rb"))
test_func = Function().compile(net.outputs[0])

lis = net.loss_var.owner_opr._param_weights


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("/home/liuyanyi02/CIFAR/meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
"""
import cv2
Example #11
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np

net = load_network(open("./data/resnet110.data", "rb"))
test_func = Function().compile(net.outputs[0])

from megskull.network import NetworkVisitor
visitor = NetworkVisitor(net.loss_var)
#print(visitor.all_oprs)
"""
for i in visitor.all_oprs:
	print(i)
	print("input = ", i.inputs)
	print("output = ", i.outputs)
	a = np.array(i.outputs[0].get_value())
	print(a)
	input()
"""
a = np.array(visitor.all_oprs_dict["conv1:W"].get_value())
print(a)
print("mean = ", np.mean(a, axis = 0))
print("std = ", np.std(a, axis = 0))
exit()

def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")
Example #12
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np
import cv2

net = load_network(open("./data/resnet20.data_acc91.14", "rb"))
test_func = Function().compile(net.outputs[0])


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

import pickle
with open("meanstd.data", "rb") as f:
    mean, std = pickle.load(f)

raw_data = data.copy()
raw_data = np.resize(raw_data, (10000, 3, 32, 32))

data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
idx = 2
Example #13
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np

net = load_network(open("./data/Res110Mixup.data", "rb"))
test_func = Function().compile(net.outputs[0])


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("/home/liuyanyi02/CIFAR/meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
"""
import cv2
for i in range(10):
	img = data[i].transpose(1, 2, 0)
Example #14
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np

net = load_network(open("./data/fixedfc.data", "rb"))
test_func = Function().compile(net.outputs[0])


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("/home/liuyanyi02/CIFAR/meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
"""
import cv2
for i in range(10):
	img = data[i].transpose(1, 2, 0)
Example #15 — file: test_new.py, project: lyuyanyii/CIFAR
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np

net = load_network(open("./data/fixed_res110.data", "rb"))
test_func = Function().compile(net.outputs[0])


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("/home/liuyanyi02/CIFAR/meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
"""
import cv2
for i in range(10):
	img = data[i].transpose(1, 2, 0)
Example #16 — file: test_new.py, project: lyuyanyii/CIFAR
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np

net = load_network(open("./data/densenetl100k24.data", "rb"))
test_func = Function().compile(net.outputs[0])

def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")

dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("meanstd.data", "rb") as f:
	mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
"""
import cv2
for i in range(10):
	img = data[i].transpose(1, 2, 0)
	img = img[:,::-1,:]
	cv2.imshow('x', img)
Example #17
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np
import cv2

net = load_network(open("./data/multiscal_v2.data", "rb"))
test_func = Function().compile(net.outputs[0])

from megskull.network import NetworkVisitor
visitor = NetworkVisitor(net.loss_var)


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
Example #18
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np

net = load_network(open("./data/plain30_dropout.data", "rb"))
test_func = Function().compile(net.outputs[0])

def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")

dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("meanstd.data", "rb") as f:
	mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
"""
import cv2
for i in range(10):
	img = data[i].transpose(1, 2, 0)
	img = img[:,::-1,:]
	cv2.imshow('x', img)
Example #19
def main():
    """Command-line entry point: dump a pickled Python Megbrain (megskull)
    model to a serialized C++-loadable graph, optionally applying
    inference-time optimizations (layout transforms, fusions, fp16).
    """
    parser = argparse.ArgumentParser(
        description='Dump the Python Megbrain model to C++ model, by the way '
        'optimizing for inference',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('input', help='input pkl model file ')
    parser.add_argument('-o', '--output', help='output file', required=True)
    parser.add_argument('--init-bn',
                        action='store_true',
                        help='initialize untrained batch-normalization, to '
                        'avoid NaN or Inf results')
    parser.add_argument('--silent',
                        action='store_true',
                        help='set verbose to False in AssertEqual opr')
    parser.add_argument('--optimize-for-inference',
                        action='store_true',
                        help='enbale optimization for inference')
    parser.add_argument('--discard-var-name',
                        action='store_true',
                        help='discard variable and param names in the '
                        'generated output')
    parser.add_argument('--output-strip-info',
                        action='store_true',
                        help='output code strip information')
    # Precision-conversion options.
    parser.add_argument(
        '--enable-io16xc32',
        action='store_true',
        help='transform the mode to float16 io float32 compute')
    parser.add_argument('--enable-ioc16',
                        action='store_true',
                        help='transform the dtype of the model to float16 io '
                        'and compute')
    # Operator-fusion options.
    parser.add_argument('--enable-fuse-conv-bias-nonlinearity',
                        action='store_true',
                        help='fuse convolution bias and nonlinearity opr to a '
                        'conv_bias opr and compute')
    # Tensor-layout transformation options (platform-specific formats).
    parser.add_argument('--enable-hwcd4',
                        action='store_true',
                        help='transform the model format from NCHW to NHWCD4 '
                        'for inference; you may need to disable CUDA and set '
                        'MGB_USE_MEGDNN_DBG=2')
    parser.add_argument('--enable-nchw4',
                        action='store_true',
                        help='transform the model format from NCHW to NCHW4 '
                        'for inference')
    parser.add_argument('--enable-nchw88',
                        action='store_true',
                        help='transform the model format from NCHW to NCHW88 '
                        'for inference')
    parser.add_argument('--enable-nchw44',
                        action='store_true',
                        help='transform the model format from NCHW to NCHW44 '
                        'for inference')
    parser.add_argument(
        '--enable-nchw44-dot',
        action='store_true',
        help='transform the model format from NCHW to NCHW44_DOT '
        'for optimizing armv8.2 dot in inference')
    parser.add_argument('--enable-chwn4',
                        action='store_true',
                        help='transform the model format to CHWN4 '
                        'for inference, mainly used for nvidia tensorcore')
    parser.add_argument('--enable-nchw32',
                        action='store_true',
                        help='transform the model format from NCHW4 to NCHW32 '
                        'for inference on nvidia TensoCore')
    parser.add_argument(
        '--enable-fuse-conv-bias-with-z',
        action='store_true',
        help='fuse conv_bias with z input for inference on '
        'nvidia GPU (this optimization pass will result in mismatch '
        'of the precision of output of training and inference)')
    args = parser.parse_args()

    # Build the forward-prop environment without verbose tracing.
    env = FpropEnv(verbose_fprop=False)

    outputs = io.load_network(args.input).outputs

    # Map each megskull output node to its underlying megbrain var.
    output_mgbvars = list(map(env.get_mgbvar, outputs))

    # Applies the --enable-* / --init-bn / --silent flags parsed above
    # (defined elsewhere in this file).
    output_mgbvars = optimize_for_inference(args, output_mgbvars)

    # keep_var_name=2 keeps full names; 0 discards them for smaller output.
    if args.discard_var_name:
        sereg_kwargs = dict(keep_var_name=0, keep_param_name=False)
    else:
        sereg_kwargs = dict(keep_var_name=2, keep_param_name=True)

    stat = mgb.serialize_comp_graph_to_file(
        args.output,
        output_mgbvars,
        append=False,
        output_strip_info=args.output_strip_info,
        **sereg_kwargs)
    # Report total size and serialization overhead (non-tensor bytes).
    logger.info(
        'graph dump sizes: tot_size={:.3f}KiB overhead={:.3f}KiB'.format(
            stat.tot_bytes / 1024,
            (stat.tot_bytes - stat.tensor_value_bytes) / 1024))
Example #20
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np
from numpy.linalg import svd

net = load_network(open("./data/plain30_xcep.data", "rb"))
func = Function().compile(net.outputs)

def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")

dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']
data = data.astype(np.float32)
import pickle
with open("meanstd.data", "rb") as f:
	mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)


idx = np.random.randint(data.shape[0], size = 3000)
data = [data[i] for i in idx]

def pca_ener(A):
	A = A.T
Example #21
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np

net = load_network(open("./data/fixed_den12.data", "rb"))
test_func = Function().compile(net.outputs[0])


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("/home/liuyanyi02/CIFAR/meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
"""
import cv2
for i in range(10):
	img = data[i].transpose(1, 2, 0)
Example #22
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
from megskull.opr.all import Grad
import numpy as np
import cv2
from train import MyMomentum

net = load_network(open("./data/plain30_dfconv.data", "rb"))
#test_func = Function().compile(net.outputs[0])

from megskull.network import NetworkVisitor

visitor = NetworkVisitor(net.loss_var)
offsets = []
offsets_name = []
locs = []
for i in visitor.all_oprs:
    print(i, i.name)
    if "Astype" in i.name:
        locs.append(i)
    if i.name in ["pooling32offsetx"]:
        offsets.append(i)
        offsets_name.append(i.name)
        print("A")
    """
	for j in i.inputs:
		if j.name == "Mul630":
			print(i.name, "***")
			offsets.append(i)
			offsets_name.append(i.name)
Example #23 — file: test_new.py, project: lyuyanyii/CIFAR
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np

net = load_network(open("./data/densenetl40k12.data", "rb"))
test_func = Function().compile(net.outputs[0])


def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")


dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("meanstd.data", "rb") as f:
    mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
"""
import cv2
for i in range(10):
	img = data[i].transpose(1, 2, 0)
Example #24
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np
import cv2
from train import MyMomentum

net = load_network(open("./data/dfconv.data", "rb"))
#test_func = Function().compile(net.outputs[0])

from megskull.network import NetworkVisitor
visitor = NetworkVisitor(net.loss_var)
offsets = []
locs = []
for i in visitor.all_oprs:
    print(i, i.name)
    if "Astype" in i.name:
        locs.append(i)
    if i.name == "conv2offsetx":
        offsets.append(i)
        print("A")
print(len(locs))
locs = locs[::4]
outs = [net.outputs[0]] + locs
test_func = Function().compile(outs)
outs1 = offsets
offs_func = Function().compile(outs1)


def load_data(name):
    import pickle
Example #25 — file: test_new.py, project: lyuyanyii/CIFAR
from megskull.graph import FpropEnv
from meghair.utils.io import load_network
from megskull.graph import Function
import numpy as np
import cv2

net = load_network(open("./data/comp.data", "rb"))
test_func = Function().compile(net.outputs[0])

from megskull.network import NetworkVisitor
visitor = NetworkVisitor(net.loss_var)

def load_data(name):
    """Read the pickled CIFAR batch file *name* and return it as a dict.

    encoding="bytes" keeps the Python-2 era keys (e.g. b'data',
    b'labels') as raw bytes.
    """
    import pickle

    with open(name, "rb") as batch_file:
        return pickle.load(batch_file, encoding="bytes")

dic = load_data("/home/liuyanyi02/CIFAR/cifar-10-batches-py/test_batch")
data = dic[b'data']
label = dic[b'labels']

data = data.astype(np.float32)
import pickle
with open("meanstd.data", "rb") as f:
	mean, std = pickle.load(f)
data = (data - mean) / std
data = np.resize(data, (10000, 3, 32, 32))
data = data.astype(np.float32)
"""
import cv2