def test_window_flip_coverage():
    """Check that repeated windowing eventually produces every possible
    3x3 window (and its width-flipped variant) of each image in the
    C01B DummyDataset.
    """
    ddata = DummyDataset()
    topo = ddata.get_topological_view()
    # ref_win[b]: hashes of every possible 3x3 window of image b, plus
    # the hash of each window flipped along the width axis.
    ref_win = [set() for _ in xrange(4)]
    for b in xrange(topo.shape[-1]):
        for i in xrange(3):
            for j in xrange(3):
                window = topo[:, i:i + 3, j:j + 3, b]
                assert_equal((3, 3), window.shape[1:])
                ref_win[b].add(_hash_array(window))
                ref_win[b].add(_hash_array(window[:, :, ::-1]))
    actual_win = [set() for _ in xrange(4)]
    wf = WindowAndFlipC01B(window_shape=(3, 3), randomize=[ddata])
    wf.setup(None, ddata, None)
    curr_topo = ddata.get_topological_view()
    # Windowing 5x5 images down to 3x3 shrinks only the spatial axes.
    assert_equal((2, 3, 3, 4), curr_topo.shape)
    for b in xrange(topo.shape[-1]):
        hashed = _hash_array(curr_topo[..., b])
        assert_contains(ref_win[b], hashed)
        actual_win[b].add(hashed)
    # Keep re-windowing until every reference window has been observed.
    while not all(len(a) == len(b) for a, b in zip(ref_win, actual_win)):
        prev_topo = curr_topo.copy()
        wf.on_monitor(None, ddata, None)
        curr_topo = ddata.get_topological_view()
        # Each on_monitor call must actually change the view.
        assert_(not (prev_topo == curr_topo).all())
        for b in xrange(topo.shape[-1]):
            hashed = _hash_array(curr_topo[..., b])
            # Fixed: was a bare `assert hashed in ref_win[b]`, which is
            # stripped under `python -O` and inconsistent with the
            # assert_contains used elsewhere in this function.
            assert_contains(ref_win[b], hashed)
            actual_win[b].add(hashed)
def check_window_flip_coverage_B01C(flip):
    """Verify 3x3 window (and optional flip) coverage on a B01C dataset."""
    ddata = DummyDataset(axes=('b', 0, 1, 'c'))
    topo = ddata.get_topological_view()
    # expected[b]: hashes of every reachable 3x3 window of image b.
    expected = [set() for _ in xrange(4)]
    for b in xrange(topo.shape[0]):
        for i, j in itertools.product(xrange(3), xrange(3)):
            win = topo[b, i:i + 3, j:j + 3, :]
            assert_equal((3, 3), win.shape[:-1])
            expected[b].add(_hash_array(win))
            if flip:
                expected[b].add(_hash_array(win[:, ::-1, :]))
    seen = [set() for _ in xrange(4)]
    wf = WindowAndFlip(window_shape=(3, 3), randomize=[ddata], flip=flip)
    wf.setup(None, ddata, None)
    curr_topo = ddata.get_topological_view()
    assert_equal((4, 3, 3, 2), curr_topo.shape)

    def record(view):
        # Hash each image of `view`, check it is a legal window, and
        # accumulate it into the set of windows seen so far.
        for b in xrange(topo.shape[0]):
            digest = _hash_array(view[b, ...])
            assert_contains(expected[b], digest)
            seen[b].add(digest)

    record(curr_topo)
    # Re-window until every expected window has shown up at least once.
    while any(len(e) != len(s) for e, s in zip(expected, seen)):
        prev_topo = curr_topo.copy()
        wf.on_monitor(None, ddata, None)
        curr_topo = ddata.get_topological_view()
        assert_(not (prev_topo == curr_topo).all())
        record(curr_topo)
def check_padding(axes):
    """Exercise WindowAndFlip zero-padding: repeatedly re-window a
    padded DummyDataset and record which padded borders and amounts
    were observed.

    Parameters
    ----------
    axes : tuple
        NOTE(review): this parameter is never used in the body -- the
        dataset is built with its default axes. Confirm whether it
        should be forwarded to DummyDataset(axes=axes).
    """
    padding = 3
    ddata = DummyDataset()
    topo = ddata.get_topological_view()
    wf_cls = WindowAndFlip
    # The window is the full image size, so any random displacement
    # shows up as rows/columns of zeros at the image borders.
    wf = wf_cls(window_shape=(5, 5), randomize=[ddata], pad_randomized=padding)
    wf.setup(None, None, None)
    new_topo = ddata.get_topological_view()
    # Windowing back to the original size must preserve the shape.
    assert_equal(topo.shape, new_topo.shape)
    # saw_padding[(direction, amount)] -> True once `amount` rows/cols
    # of zeros were seen on that side ('l'eft/'b'ottom/'r'ight/'t'op).
    saw_padding = dict([((direction, amount), False) for direction, amount in itertools.product(['l', 'b', 'r', 't'], xrange(padding))])
    iters = 0
    # Re-window (at most 50 times) until every border/amount pair has
    # been observed at least once.
    while not all(saw_padding.values()) and iters < 50:
        # assumes ('c', 0, 1, 'b') ordering: swapaxes(0, 3) puts the
        # batch axis first so each `image` is (rows, cols, channels)
        # -- TODO confirm against DummyDataset's default axes.
        for image in new_topo.swapaxes(0, 3):
            for i in xrange(padding):
                # NOTE(review): at i == 0 these tests are asymmetric --
                # image[:0] is empty so (... == 0).all() is trivially
                # True, while image[-0:] is the WHOLE image; verify the
                # i == 0 cases behave as intended.
                if (image[:i] == 0).all():
                    saw_padding['t', i] = True
                if (image[-i:] == 0).all():
                    saw_padding['b', i] = True
                if (image[:, -i:] == 0).all():
                    saw_padding['r', i] = True
                if (image[:, :i] == 0).all():
                    saw_padding['l', i] = True
        wf.on_monitor(None, None, None)
        new_topo = ddata.get_topological_view()
        iters += 1
def check_padding(axes, use_old_c01b=False):
    """Exercise zero-padding for WindowAndFlip (or the deprecated
    WindowAndFlipC01B): repeatedly re-window a padded DummyDataset and
    record which padded borders and amounts were observed.

    NOTE(review): a function with this same name appears earlier in
    the file; this definition shadows it at import time.

    Parameters
    ----------
    axes : tuple
        NOTE(review): never used in the body -- the dataset is built
        with default axes. Confirm whether it should be forwarded.
    use_old_c01b : bool, optional
        If True, use the deprecated WindowAndFlipC01B class.
    """
    padding = 3
    ddata = DummyDataset()
    topo = ddata.get_topological_view()
    if use_old_c01b:
        wf_cls = WindowAndFlipC01B
    else:
        wf_cls = WindowAndFlip
    # Window matches the image size, so displacement manifests as
    # zero rows/columns at the borders.
    wf = wf_cls(window_shape=(5, 5), randomize=[ddata], pad_randomized=padding)
    wf.setup(None, None, None)
    new_topo = ddata.get_topological_view()
    # Windowing back to the original size must preserve the shape.
    assert_equal(topo.shape, new_topo.shape)
    # saw_padding[(direction, amount)] -> True once `amount` rows/cols
    # of zeros were seen on that side ('l'eft/'b'ottom/'r'ight/'t'op).
    saw_padding = dict([((direction, amount), False) for direction, amount in itertools.product( ['l', 'b', 'r', 't'], xrange(padding))])
    iters = 0
    # Re-window (at most 50 times) until every border/amount pair has
    # been observed at least once.
    while not all(saw_padding.values()) and iters < 50:
        # assumes ('c', 0, 1, 'b') ordering: swapaxes(0, 3) puts the
        # batch axis first -- TODO confirm.
        for image in new_topo.swapaxes(0, 3):
            for i in xrange(padding):
                # NOTE(review): at i == 0, image[:0] is empty (test
                # trivially True) but image[-0:] is the whole image --
                # asymmetric; verify intent.
                if (image[:i] == 0).all():
                    saw_padding['t', i] = True
                if (image[-i:] == 0).all():
                    saw_padding['b', i] = True
                if (image[:, -i:] == 0).all():
                    saw_padding['r', i] = True
                if (image[:, :i] == 0).all():
                    saw_padding['l', i] = True
        wf.on_monitor(None, None, None)
        new_topo = ddata.get_topological_view()
        iters += 1
def test_conditional_initialize_parameters():
    """
    Conditional.initialize_parameters does the following:

    * Set its input_space and ndim attributes
    * Calls its MLP's set_mlp method
    * Sets its MLP's input_space
    * Validates its MLP
    * Sets its params and param names
    """
    mlp = MLP(layers=[Linear(layer_name='h', dim=5, irange=0.01,
                             max_col_norm=0.01)])
    conditional = DummyConditional(mlp=mlp, name='conditional')
    vae = DummyVAE()
    conditional.set_vae(vae)
    input_space = VectorSpace(dim=5)
    conditional.initialize_parameters(input_space=input_space, ndim=5)
    testing.assert_same_object(input_space, conditional.input_space)
    testing.assert_equal(conditional.ndim, 5)
    testing.assert_same_object(mlp.get_mlp(), conditional)
    testing.assert_same_object(mlp.input_space, input_space)
    # The conditional's parameter list must contain exactly the same
    # objects as the wrapped MLP's parameter list.
    params_of_mlp = mlp.get_params()
    params_of_conditional = conditional.get_params()
    for param in params_of_mlp:
        assert param in params_of_conditional
    for param in params_of_conditional:
        assert param in params_of_mlp
def test_WindowAndFlipC01B_axes_guard():
    """The deprecated C01B class must reject a dataset with B01C axes."""
    b01c_data = DummyDataset(axes=('b', 0, 1, 'c'))
    got_value_error = False
    try:
        WindowAndFlipC01B(window_shape=(3, 3), randomize=[b01c_data])
    except ValueError:
        got_value_error = True
    assert_equal(got_value_error, True)
def test_conditional_returns_lr_scalers():
    """
    Conditional.get_lr_scalers calls its MLP's get_lr_scalers method
    """
    inner_mlp = MLP(layers=[Linear(layer_name="h", dim=5, irange=0.01,
                                   W_lr_scale=0.01)])
    conditional = DummyConditional(mlp=inner_mlp, name="conditional")
    conditional.set_vae(DummyVAE())
    conditional.initialize_parameters(input_space=VectorSpace(dim=5),
                                      ndim=5)
    # The conditional must report exactly the wrapped MLP's scalers.
    testing.assert_equal(conditional.get_lr_scalers(),
                         inner_mlp.get_lr_scalers())
def test_conditional_returns_lr_scalers():
    """
    Conditional.get_lr_scalers calls its MLP's get_lr_scalers method
    """
    linear = Linear(layer_name='h', dim=5, irange=0.01, W_lr_scale=0.01)
    mlp = MLP(layers=[linear])
    cond = DummyConditional(mlp=mlp, name='conditional')
    vae = DummyVAE()
    cond.set_vae(vae)
    space = VectorSpace(dim=5)
    cond.initialize_parameters(input_space=space, ndim=5)
    # Learning-rate scalers are delegated to the wrapped MLP.
    testing.assert_equal(cond.get_lr_scalers(), mlp.get_lr_scalers())
def test_prior_set_vae():
    """
    Prior.set_vae adds a reference to the vae and adopts the vae's rng
    and batch_size attributes
    """
    vae = DummyVAE()
    prior = DummyPrior()
    prior.set_vae(vae)
    # The prior must hold the exact VAE object and mirror its
    # rng and batch_size attributes.
    testing.assert_same_object(prior.vae, vae)
    testing.assert_same_object(prior.rng, vae.rng)
    testing.assert_equal(prior.batch_size, vae.batch_size)
def test_conditional_modify_updates():
    """
    Conditional.modify_updates calls its MLP's modify_updates method
    """
    mlp = MLP(layers=[Linear(layer_name="h", dim=5, irange=0.01,
                             max_col_norm=0.01)])
    cond = DummyConditional(mlp=mlp, name="conditional")
    cond.set_vae(DummyVAE())
    cond.initialize_parameters(input_space=VectorSpace(dim=5), ndim=5)
    # Identity updates: each parameter maps to itself.
    params = mlp.get_params()
    updates = OrderedDict(zip(params, params))
    testing.assert_equal(cond.modify_updates(updates),
                         mlp.modify_updates(updates))
def test_conditional_set_vae():
    """
    Conditional.set_vae adds a reference to the vae and adopts the vae's
    rng and batch_size attributes
    """
    mlp = MLP(layers=[Linear(layer_name='h', dim=5, irange=0.01)])
    cond = DummyConditional(mlp=mlp, name='conditional')
    vae = DummyVAE()
    cond.set_vae(vae)
    # The conditional must reference the exact VAE and share its
    # rng and batch_size.
    testing.assert_same_object(cond.vae, vae)
    testing.assert_same_object(cond.rng, vae.rng)
    testing.assert_equal(cond.batch_size, vae.batch_size)
def test_conditional_set_vae():
    """
    Conditional.set_vae adds a reference to the vae and adopts the vae's
    rng and batch_size attributes
    """
    layer = Linear(layer_name="h", dim=5, irange=0.01)
    conditional = DummyConditional(mlp=MLP(layers=[layer]),
                                   name="conditional")
    vae = DummyVAE()
    conditional.set_vae(vae)
    # After set_vae the conditional mirrors the VAE's rng/batch_size.
    testing.assert_same_object(conditional.vae, vae)
    testing.assert_same_object(conditional.rng, vae.rng)
    testing.assert_equal(conditional.batch_size, vae.batch_size)
def test_conditional_modify_updates():
    """
    Conditional.modify_updates calls its MLP's modify_updates method
    """
    linear = Linear(layer_name='h', dim=5, irange=0.01,
                    max_col_norm=0.01)
    mlp = MLP(layers=[linear])
    conditional = DummyConditional(mlp=mlp, name='conditional')
    vae = DummyVAE()
    conditional.set_vae(vae)
    conditional.initialize_parameters(input_space=VectorSpace(dim=5),
                                      ndim=5)
    # Map every MLP parameter onto itself (a no-op update dict).
    updates = OrderedDict((p, p) for p in mlp.get_params())
    testing.assert_equal(conditional.modify_updates(updates),
                         mlp.modify_updates(updates))
def check_window_flip_coverage_C01B(flip, use_old_c01b=False):
    """Check that repeated windowing eventually produces every possible
    3x3 window (and, if `flip`, each width-flipped window) of each
    image in a C01B DummyDataset.

    Parameters
    ----------
    flip : bool
        If True, flipped windows are also counted as legal outputs.
    use_old_c01b : bool, optional
        If True, exercise the deprecated WindowAndFlipC01B class
        instead of WindowAndFlip.
    """
    # 4 5x5x2 images (stored in a 2x5x5x4 tensor)
    ddata = DummyDataset(axes=('c', 0, 1, 'b'))
    topo = ddata.get_topological_view()
    # ref_win[b]: a set of hashes, computed from all possible 3x3 windows of
    # topo[..., b].
    ref_win = [set() for _ in xrange(4)]
    for b in xrange(topo.shape[-1]):
        # get all possible 3x3 windows within the 5x5 images.
        for i in xrange(3):
            for j in xrange(3):
                window = topo[:, i:i + 3, j:j + 3, b]
                assert_equal((3, 3), window.shape[1:])
                # Add a SHA1 digest of the window to set ref_win[b]
                ref_win[b].add(_hash_array(window))
                if flip:
                    # Also add the hash of the window with axis 1 flipped
                    ref_win[b].add(_hash_array(window[:, :, ::-1]))
    # actual_win[b]: hashes actually produced for image b so far.
    actual_win = [set() for _ in xrange(4)]
    if use_old_c01b:
        wf_cls = WindowAndFlipC01B
    else:
        wf_cls = WindowAndFlip
    # no zero-padding.
    wf = wf_cls(window_shape=(3, 3), randomize=[ddata], flip=flip)
    wf.setup(None, ddata, None)  # ddata argument is ignored
    curr_topo = ddata.get_topological_view()
    # Windowing 5x5 down to 3x3 shrinks only the spatial axes.
    assert_equal((2, 3, 3, 4), curr_topo.shape)
    for b in xrange(topo.shape[-1]):
        hashed = _hash_array(curr_topo[..., b])
        assert_contains(ref_win[b], hashed)
        actual_win[b].add(hashed)
    # Loop until every reference window has been produced at least once;
    # each on_monitor call re-windows the data in place.
    while not all(len(a) == len(b) for a, b in zip(ref_win, actual_win)):
        prev_topo = curr_topo.copy()
        wf.on_monitor(None, ddata, None)
        curr_topo = ddata.get_topological_view()
        # The re-windowed view must actually change between calls.
        assert_(not (prev_topo == curr_topo).all())
        for b in xrange(topo.shape[-1]):
            hashed = _hash_array(curr_topo[..., b])
            assert_contains(ref_win[b], hashed)
            actual_win[b].add(hashed)
def check_window_flip_coverage_C01B(flip, use_old_c01b=False):
    """Coverage check for 3x3 windowing (and optional flips) on C01B data."""
    dataset = DummyDataset(axes=('c', 0, 1, 'b'))
    topo = dataset.get_topological_view()
    n_images = topo.shape[-1]
    # legal[b]: hashes of every window the extension may legally emit.
    legal = [set() for _ in xrange(4)]
    for b in xrange(n_images):
        for i, j in itertools.product(xrange(3), xrange(3)):
            win = topo[:, i:i + 3, j:j + 3, b]
            assert_equal((3, 3), win.shape[1:])
            legal[b].add(_hash_array(win))
            if flip:
                legal[b].add(_hash_array(win[:, :, ::-1]))
    observed = [set() for _ in xrange(4)]
    wf_cls = WindowAndFlipC01B if use_old_c01b else WindowAndFlip
    wf = wf_cls(window_shape=(3, 3), randomize=[dataset], flip=flip)
    wf.setup(None, dataset, None)
    curr_topo = dataset.get_topological_view()
    assert_equal((2, 3, 3, 4), curr_topo.shape)
    for b in xrange(n_images):
        digest = _hash_array(curr_topo[..., b])
        assert_contains(legal[b], digest)
        observed[b].add(digest)
    # Re-window until every legal window has been observed at least once.
    while any(len(lg) != len(ob) for lg, ob in zip(legal, observed)):
        prev_topo = curr_topo.copy()
        wf.on_monitor(None, dataset, None)
        curr_topo = dataset.get_topological_view()
        assert_(not (prev_topo == curr_topo).all())
        for b in xrange(n_images):
            digest = _hash_array(curr_topo[..., b])
            assert_contains(legal[b], digest)
            observed[b].add(digest)
def test_conditional_initialize_parameters():
    """
    Conditional.initialize_parameters does the following:

    * Set its input_space and ndim attributes
    * Calls its MLP's set_mlp method
    * Sets its MLP's input_space
    * Validates its MLP
    * Sets its params and param names
    """
    hidden = Linear(layer_name="h", dim=5, irange=0.01, max_col_norm=0.01)
    mlp = MLP(layers=[hidden])
    conditional = DummyConditional(mlp=mlp, name="conditional")
    vae = DummyVAE()
    conditional.set_vae(vae)
    space = VectorSpace(dim=5)
    conditional.initialize_parameters(input_space=space, ndim=5)
    testing.assert_same_object(space, conditional.input_space)
    testing.assert_equal(conditional.ndim, 5)
    testing.assert_same_object(mlp.get_mlp(), conditional)
    testing.assert_same_object(mlp.input_space, space)
    # Both parameter lists must contain the same objects.
    mlp_params = mlp.get_params()
    cond_params = conditional.get_params()
    assert all(mp in cond_params for mp in mlp_params)
    assert all(cp in mlp_params for cp in cond_params)