import itertools

import numpy as np
from scipy.stats import pearsonr

import nnabla as nn
import nnabla.functions as F


def test_random_crop_forward_backward(seed, inshape, shape, ctx, func_name):
    from nbla_test_utils import function_tester
    rng = np.random.RandomState(seed)
    inputs = [rng.randn(*inshape).astype(np.float32)]
    i = nn.Variable(inputs[0].shape, need_grad=True)
    i.d = inputs[0]
    # NNabla forward
    with nn.context_scope(ctx), nn.auto_forward():
        o = F.random_crop(i, shape, 0, seed)
    if shape is not None:
        max_correl = 0
        possible_crop_range = [
            input - output for output, input in zip(shape, inshape)]
        for crop_pos in itertools.product(
                *map(tuple,
                     map(lambda x: range(*x),
                         [(0, r + 1) for r in possible_crop_range]))):
            r = inputs[0][crop_pos[0]:crop_pos[0] + shape[0],
                          crop_pos[1]:crop_pos[1] + shape[1],
                          crop_pos[2]:crop_pos[2] + shape[2]]
            assert o.d.shape == r.shape
            correl_and_p = pearsonr(o.d.flatten(), r.flatten())
            if correl_and_p[0] > max_correl:
                max_correl = correl_and_p[0]
    else:
        max_correl = pearsonr(o.d.flatten(), inputs[0].flatten())[0]
    assert max_correl == 1.0
    assert o.parent.name == func_name

    # Skipping Backward check
    g = np.random.randn(*i.shape)
    i.g = g
    o_grad = np.random.randn(*o.shape)
    o.g = o_grad
    o.parent.backward([i], [o])
    ref_grad = i.g.copy() - g

    # Check accum=False with NaN gradient
    i.g = np.float32('nan')
    o.parent.backward([i], [o], [False])
    assert not np.any(np.isnan(i.g))

    # Check if accum option works
    i.g[...] = 1
    o.g = o_grad
    o.parent.backward([i], [o], [False])
    assert np.allclose(i.g, ref_grad, atol=1e-6)

    # Check if need_grad works
    i.g[...] = 0
    i.need_grad = False
    o_diff = rng.randn(*o.shape).astype(i.d.dtype)
    o.backward(o_diff)
    assert np.all(i.g == 0)

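# A hedged, standalone sketch (the shapes and seed are illustrative assumptions,
# not from the original source) of what the test above exercises: with
# base_axis=0 the whole array is cropped, and backward scatters the output
# gradient back into the cropped window of the input.
import numpy as np
import nnabla as nn
import nnabla.functions as F

rng = np.random.RandomState(0)
x = nn.Variable((3, 8, 8), need_grad=True)
x.d = rng.randn(*x.shape).astype(np.float32)
x.g = 0
with nn.auto_forward():
    y = F.random_crop(x, shape=(3, 5, 5), base_axis=0, seed=313)
y.backward()  # default output gradient of 1
# The gradient is 1 inside the 3x5x5 crop window and 0 elsewhere.
assert x.g.sum() == 3 * 5 * 5
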
import nnabla as nn
import nnabla.functions as F
import nnabla.parametric_functions as PF


def construct_networks(args, ops, arch_dict, image, test):
    """
    Construct a network by stacking cells.

    input:
        args: arguments set by user.
        ops: operations used in the network.
        arch_dict: a dictionary containing architecture information.
        image: Variable. Input images.
        test: bool. True if the network is for validation.
    """
    num_of_cells = args.num_cells
    initial_output_filter = args.output_filter + args.additional_filters_on_retrain
    num_class = 10
    aux_logits = None
    if not test:
        # Pad by 4 pixels on each spatial side, then randomly crop back to the
        # original shape (standard crop augmentation), plus random horizontal flip.
        image = F.random_crop(F.pad(image, (4, 4, 4, 4)), shape=image.shape)
        image = F.image_augmentation(image, flip_lr=True)
        image.need_grad = False
    x = image

    with nn.parameter_scope("stem_conv1"):
        stem_1 = PF.convolution(x, initial_output_filter, (3, 3), (1, 1),
                                with_bias=False)
        stem_1 = PF.batch_normalization(stem_1, batch_stat=not test)

    cell_prev, cell_prev_prev = stem_1, stem_1
    output_filter = initial_output_filter
    is_reduced_curr, is_reduced_prev = False, False

    for i in range(num_of_cells):
        if i in [num_of_cells // 3, 2 * num_of_cells // 3]:
            output_filter = 2 * output_filter
            is_reduced_curr = True
        else:
            is_reduced_curr = False
        # constructing_learned_cell and construct_aux_head are helpers defined
        # elsewhere in the same source file.
        y, is_reduced_curr, is_reduced_prev, output_filter = \
            constructing_learned_cell(args, ops, arch_dict, i,
                                      cell_prev_prev, cell_prev, output_filter,
                                      is_reduced_curr, is_reduced_prev, test)
        if i == 2 * num_of_cells // 3 and args.auxiliary and not test:
            print("Using Aux Tower after cell_{}".format(i))
            aux_logits = construct_aux_head(y, num_class)
        cell_prev, cell_prev_prev = y, cell_prev  # shifting

    y = F.average_pooling(y, y.shape[2:])  # works as global average pooling
    with nn.parameter_scope("fc"):
        pred = PF.affine(y, num_class, with_bias=True)
    return pred, aux_logits

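# A minimal, self-contained sketch (not from the original source; the CIFAR-like
# (8, 3, 32, 32) batch is an assumption) of the pad-and-crop augmentation step
# used in construct_networks above.
import numpy as np
import nnabla as nn
import nnabla.functions as F

image = nn.Variable.from_numpy_array(
    np.random.randn(8, 3, 32, 32).astype(np.float32))
with nn.auto_forward():
    padded = F.pad(image, (4, 4, 4, 4))                   # -> (8, 3, 40, 40)
    augmented = F.random_crop(padded, shape=image.shape)  # -> (8, 3, 32, 32)
# Cropping back to the original shape yields a randomly shifted view of the
# padded image; the batch and channel axes are left untouched.
assert augmented.shape == image.shape
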
def image_augmentation(args, img, seg):
    imgseg = F.concatenate(img, seg, axis=1)
    imgseg = F.random_crop(imgseg, shape=(args.fineSizeH, args.fineSizeW))
    if not args.no_flip:
        imgseg = F.random_flip(imgseg, axes=(3,))
    return imgseg

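# Hedged usage sketch for image_augmentation above. The SimpleNamespace stand-in
# for the argparse options and the (1, 3, 286, 286) / (1, 1, 286, 286) shapes
# are illustrative assumptions, not values from the original source.
from types import SimpleNamespace
import numpy as np
import nnabla as nn
import nnabla.functions as F

opt = SimpleNamespace(fineSizeH=256, fineSizeW=256, no_flip=False)
img = nn.Variable.from_numpy_array(
    np.random.randn(1, 3, 286, 286).astype(np.float32))
seg = nn.Variable.from_numpy_array(
    np.random.randn(1, 1, 286, 286).astype(np.float32))
with nn.auto_forward():
    out = image_augmentation(opt, img, seg)
# Concatenating along the channel axis first makes the image and segmentation
# map share one random crop and flip, so they stay spatially aligned; the
# result should be 256x256 in the spatial dimensions.
print(out.shape)
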
def random_jitter(wave, max_jitter_steps):
    r"""Temporal jitter."""
    shape = wave.shape
    wave = F.pad(wave, (0, 0, max_jitter_steps, max_jitter_steps))
    wave = F.random_crop(wave, shape=shape)
    return wave

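# Hedged usage sketch for random_jitter above; the (2, 1, 16000) waveform shape
# and the jitter of 64 samples are illustrative assumptions.
import numpy as np
import nnabla as nn
import nnabla.functions as F

wave = nn.Variable.from_numpy_array(
    np.random.randn(2, 1, 16000).astype(np.float32))
with nn.auto_forward():
    jittered = random_jitter(wave, max_jitter_steps=64)
# Padding the time axis and cropping back to the original shape shifts the
# waveform by up to +/- max_jitter_steps samples.
assert jittered.shape == wave.shape
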
def __call__(self, input):
    if self._pad_width is not None:
        input = F.pad(input, self._pad_width)
    return F.random_crop(input, shape=self._shape)

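# A minimal, hypothetical wrapper class (an assumption, not the original class
# the __call__ above belongs to) showing how such a pad-then-random-crop
# transform could be constructed and applied.
import numpy as np
import nnabla as nn
import nnabla.functions as F


class RandomCropTransform:
    def __init__(self, shape, pad_width=None):
        self._shape = shape          # target output shape of the crop
        self._pad_width = pad_width  # optional padding applied before cropping

    def __call__(self, input):
        if self._pad_width is not None:
            input = F.pad(input, self._pad_width)
        return F.random_crop(input, shape=self._shape)


x = nn.Variable.from_numpy_array(
    np.random.randn(8, 3, 32, 32).astype(np.float32))
transform = RandomCropTransform(shape=x.shape, pad_width=(4, 4, 4, 4))
with nn.auto_forward():
    y = transform(x)
assert y.shape == x.shape
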