import os

# download data if necessary
def _download(data_dir):
    if not os.path.isdir(data_dir):
        os.system('mkdir ' + data_dir)
    os.chdir(data_dir)
    if (not os.path.exists('train.rec')) or \
       (not os.path.exists('test.rec')):
        os.system('wget http://webdocs.cs.ualberta.ca/~bx3/data/cifar10.zip')
        os.system('unzip -u cifar10.zip')
        os.system('mv cifar/* .; rm -rf cifar; rm cifar10.zip')
    os.chdir('..')

# network
from symbol_resnet import get_symbol
net = get_symbol(num_class=10)

# data
def get_iterator(args, kv):
    kargs = dict(
        data_shape=(3, 32, 32),
        # Using mean and scale works equally well;
        # we use BatchNorm after data for simplicity.
        # mean_r=127,
        # mean_g=127,
        # mean_b=127,
        # scale=1 / 60
    )
    if '://' not in args.data_dir:
        _download(args.data_dir)
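# --- Optional variant (not part of the original script) ---------------------
# A minimal sketch of the same download step done with the standard library
# instead of shelling out to wget/unzip/mv. The cifar10.zip URL and the
# train.rec/test.rec file names come from the helper above; the helper name
# and the flattening of the cifar/ subfolder are assumptions for illustration.
import zipfile
try:
    from urllib.request import urlretrieve  # Python 3
except ImportError:
    from urllib import urlretrieve           # Python 2

def _download_stdlib(data_dir):
    if not os.path.isdir(data_dir):
        os.makedirs(data_dir)
    if not (os.path.exists(os.path.join(data_dir, 'train.rec')) and
            os.path.exists(os.path.join(data_dir, 'test.rec'))):
        zip_path = os.path.join(data_dir, 'cifar10.zip')
        urlretrieve('http://webdocs.cs.ualberta.ca/~bx3/data/cifar10.zip', zip_path)
        with zipfile.ZipFile(zip_path) as zf:
            zf.extractall(data_dir)
        # the archive unpacks into a cifar/ subfolder; flatten it like the
        # original `mv cifar/* .`
        cifar_dir = os.path.join(data_dir, 'cifar')
        for name in os.listdir(cifar_dir):
            os.rename(os.path.join(cifar_dir, name), os.path.join(data_dir, name))
        os.rmdir(cifar_dir)
        os.remove(zip_path)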
import time

import numpy as np
import mxnet as mx

from symbol_resnet import get_symbol

def check_speed(sym, ctx, scale=1.0, N=100):
    exe = sym.simple_bind(grad_req='write', **ctx)
    init = [
        np.random.normal(size=arr.shape, scale=scale)
        for arr in exe.arg_arrays
    ]
    for arr, iarr in zip(exe.arg_arrays, init):
        arr[:] = iarr.astype(arr.dtype)
    # warm-up pass
    exe.forward(is_train=False)
    #exe.backward(exe.outputs[0])
    exe.outputs[0].wait_to_read()
    tic = time.time()
    for i in range(N):
        # backward() requires a training-mode forward pass
        exe.forward(is_train=True)
        exe.backward(exe.outputs[0])
        exe.outputs[0].wait_to_read()
    return (time.time() - tic) * 1000.0 / N

sym_res = get_symbol()
# print_summary prints the per-layer table itself and returns None,
# so it is not wrapped in print()
mx.visualization.print_summary(sym_res, shape={'data': (1, 3, 224, 224)})

#ctx_list = [{'ctx': mx.gpu(0), 'data': (1, 3, 224, 224)}]
#for ctx in ctx_list:
#    print(ctx, check_speed(sym_res, ctx, N=10))
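# A hedged usage sketch, mirroring the commented-out GPU block above: time the
# symbol on CPU with batch size 1 (swap mx.cpu() for mx.gpu(0) when a GPU is
# available); the (1, 3, 224, 224) input shape matches the summary printed above.
cpu_ctx = {'ctx': mx.cpu(), 'data': (1, 3, 224, 224)}
print('forward+backward: %.2f ms/iter' % check_speed(sym_res, cpu_ctx, N=10))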
parser.add_argument('--num-epochs', type=int, default=200,
                    help='the number of training epochs')
parser.add_argument('--load-epoch', type=int,
                    help='load the model on an epoch using the model-prefix')
parser.add_argument('--kv-store', type=str, default='local',
                    help='the kvstore type')
args = parser.parse_args()

# network
from symbol_resnet import get_symbol
net = get_symbol(num_class=10)

# data
def get_iterator(args, kv):
    kargs = dict(
        data_shape=(3, 32, 32),
        # Using mean and scale works equally well;
        # we use BatchNorm after data for simplicity.
        # mean_r=127,
        # mean_g=127,
        # mean_b=127,
        # scale=1 / 60
    )
    train = mx.io.ImageRecordIter(
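# The excerpt above is cut off inside the ImageRecordIter call. A hedged
# sketch of how such a training iterator is typically configured follows; the
# record-file name (train.rec, as produced by the download helper), the
# batch-size argument, the augmentation flags and the use of **kargs are
# assumptions for illustration, not the original arguments.
#     train = mx.io.ImageRecordIter(
#         path_imgrec=os.path.join(args.data_dir, 'train.rec'),
#         batch_size=args.batch_size,
#         rand_crop=True,
#         rand_mirror=True,
#         num_parts=kv.num_workers,
#         part_index=kv.rank,
#         **kargs)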
parser.add_argument('--num-epochs', type=int, default=4,
                    help='the number of training epochs')
parser.add_argument('--load-epoch', type=int,
                    help='load the model on an epoch using the model-prefix')
parser.add_argument('--kv-store', type=str, default='local',
                    help='the kvstore type')
args = parser.parse_args()

# network
from symbol_resnet import get_symbol
net = get_symbol(num_classes=1000, num_layers=50,
                 image_shape='3, 224, 224', conv_workspace=512)

# data
def get_iterator(args, kv):
    kargs = dict(
        data_shape=(3, 224, 224),
        # Using mean and scale works equally well;
        # we use BatchNorm after data for simplicity.
        # mean_r=127,
        # mean_g=127,
        # mean_b=127,
        # scale=1 / 60
    )
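# A hedged sanity-check sketch (not part of the original script): bind the
# ResNet-50 symbol built above into a Module to confirm that the declared
# (3, 224, 224) input shape and the 1000-way output are consistent before
# launching full ImageNet training. The CPU context, batch size 1 and the
# 'softmax_label' name (MXNet's default for SoftmaxOutput) are assumptions.
import mxnet as mx

mod = mx.mod.Module(symbol=net, context=mx.cpu(),
                    data_names=['data'], label_names=['softmax_label'])
mod.bind(data_shapes=[('data', (1, 3, 224, 224))],
         label_shapes=[('softmax_label', (1,))])
mod.init_params(initializer=mx.init.Xavier())
print(mod.output_shapes)  # expected: a single output of shape (1, 1000)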