def test_all_scalar_output_fuzzy():
    """
    Test numba fuzzy scalar output functions against the reference
    Python implementation, i.e. for factor means in [0, 1].

    For each canonical LOM, draws a few random fuzzy factor vectors and
    checks that the compiled and reference outputs agree to machine
    precision.
    """
    L = 3
    for model in aux.canonical_loms():
        for randiter in range(3):
            # Fuzzy factors: uniform random means in [0, 1].
            z = np.random.rand(L)
            u = np.random.rand(L)
            numba_fct = lupd.get_scalar_output_function_2d(model, fuzzy=True)
            numba_out = numba_fct(z, u)
            python_out = scalar_output_python_fuzzy(model, z, u)
            try:
                # Both paths compute the same probability; allow only
                # floating-point round-off.
                assert abs(numba_out - python_out) < 1e-12
            except AssertionError as err:
                # Chain the original assertion so the traceback shows
                # which comparison failed.
                raise ValueError(
                    'Fuzzy scalar output function for ' + model + ' failed.'
                ) from err
def test_all_3D_LOMs():
    """
    End-to-end reconstruction test for all canonical 3D LOMs.

    For each machine: sample random binary factor matrices, generate data,
    initialise a layer at the ground truth (with a large lbda so inference
    does not drift away from it), run a short inference, and require both
    output techniques to reconstruct the data with > .98 accuracy.
    """
    N = 50
    D = 10
    L = 3
    for machine in aux.canonical_loms():
        # Random binary factors in {0, 1}, mapped to {-1, 1} below.
        Z = np.array(np.random.rand(N, L) > .5, dtype=np.int8)
        U = np.array(np.random.rand(D, L) > .5, dtype=np.int8)
        V = np.array(np.random.rand(D, L) > .5, dtype=np.int8)

        # generate_data_fast is not available for all machines
        X = aux.lom_generate_data([2 * Z - 1, 2 * U - 1, 2 * V - 1],
                                  model=machine)

        orm = lom.Machine()
        data = orm.add_matrix(X, fixed=True)
        layer = orm.add_layer(latent_size=L, child=data, model=machine)

        # Initialise factors at the ground truth, accounting for the
        # machine's factor-inversion convention.
        layer.z.val = (1 - 2 * layer.invert_factors) * (2 * Z - 1)
        layer.u.val = (1 - 2 * layer.invert_factors) * (2 * U - 1)
        layer.v.val = (1 - 2 * layer.invert_factors) * (2 * V - 1)

        # We initialise with ground truth, hence set lbda large to avoid
        # effectively random initialisation.
        layer.lbda.val = 3.0

        orm.infer(burn_in_min=10, fix_lbda_iters=2)

        try:
            assert np.mean((2 * (layer.output(technique='factor_map') > .5)
                            - 1) == data()) > .98
            assert np.mean((2 * (layer.output(technique='factor_mean') > .5)
                            - 1) == data()) > .98
        except AssertionError as err:
            acc = np.mean((2 * (layer.output(technique='factor_mean') > .5)
                           - 1) == data())
            # Surface the diagnostic in the exception itself instead of
            # only printing it, and keep the original traceback.
            raise ValueError(
                machine + ' failed with reconstruction accuracy of '
                + str(acc)
            ) from err
def test_all_scalar_output():
    """
    Test numba scalar output implementations against the simple Python
    implementation on random binary ({-1, 1}) factor vectors.
    """
    # generate random data
    L = 3
    for model in aux.canonical_loms():
        for randiter in range(10):
            # Binary factors in {-1, 1}.
            z = 2 * np.array(np.random.rand(L) > .5, dtype=np.int8) - 1
            u = 2 * np.array(np.random.rand(L) > .5, dtype=np.int8) - 1
            numba_fct = lupd.get_scalar_output_function_2d(model, fuzzy=False)
            try:
                assert numba_fct(z, u) == scalar_output_python(model, z, u)
            except AssertionError as err:
                # Chain so the failing comparison stays in the traceback.
                raise ValueError(
                    'Scalar output function for ' + model + ' failed.'
                ) from err
def test_densities():
    """
    Check that the empirical density of generated data matches the
    analytically expected density for each implemented machine clan.

    Previously a bare ``except:`` swallowed the assertion and only
    printed diagnostics, so this test could never fail; it now prints
    the diagnostics and re-raises.
    """
    machines = aux.canonical_loms(level='clans', mode='implemented')

    N = 200
    D = 200
    L = 3
    for machine in machines:
        # Expected fraction of X == 1 for random factors of density .5.
        d = aux.expected_density(machine, L=3, K=2, f=.5)

        Z = np.array(np.random.rand(N, L) > .5, dtype=np.int8)
        U = np.array(np.random.rand(D, L) > .5, dtype=np.int8)
        X = aux.lom_generate_data_fast([2 * Z - 1, 2 * U - 1], model=machine)

        # Smoke-test the train/test split alongside the density check.
        X_train, train_mask = experiments.split_train_test(X, split=.1)

        try:
            assert (np.abs(np.mean(X == 1) - d)) < 5e-2
        except AssertionError:
            # Print diagnostics for debugging, then fail the test.
            print(np.abs(np.mean(X == 1)))
            print(d)
            print(machine)
            raise