def test_sia_bipartitions():
    with config.override(CUT_ONE_APPROXIMATION=False):
        answer = [
            models.Cut((1,), (2, 3, 4)),
            models.Cut((2,), (1, 3, 4)),
            models.Cut((1, 2), (3, 4)),
            models.Cut((3,), (1, 2, 4)),
            models.Cut((1, 3), (2, 4)),
            models.Cut((2, 3), (1, 4)),
            models.Cut((1, 2, 3), (4,)),
            models.Cut((4,), (1, 2, 3)),
            models.Cut((1, 4), (2, 3)),
            models.Cut((2, 4), (1, 3)),
            models.Cut((1, 2, 4), (3,)),
            models.Cut((3, 4), (1, 2)),
            models.Cut((1, 3, 4), (2,)),
            models.Cut((2, 3, 4), (1,)),
        ]
        assert sia_bipartitions((1, 2, 3, 4)) == answer

    with config.override(CUT_ONE_APPROXIMATION=True):
        answer = [
            models.Cut((1,), (2, 3, 4)),
            models.Cut((2,), (1, 3, 4)),
            models.Cut((3,), (1, 2, 4)),
            models.Cut((4,), (1, 2, 3)),
            models.Cut((2, 3, 4), (1,)),
            models.Cut((1, 3, 4), (2,)),
            models.Cut((1, 2, 4), (3,)),
            models.Cut((1, 2, 3), (4,)),
        ]
        assert sia_bipartitions((1, 2, 3, 4)) == answer


def test_sia_cache_key_includes_config_dependencies(s):
    with config.override(MEASURE='EMD'):
        emd_big_phi = compute.phi(s)

    with config.override(MEASURE='L1'):
        l1_big_phi = compute.phi(s)

    assert l1_big_phi != emd_big_phi


def test_all_complexes_parallelization(s):
    with config.override(PARALLEL_COMPLEX_EVALUATION=False):
        serial = compute.all_complexes(s.network, s.state)

    with config.override(PARALLEL_COMPLEX_EVALUATION=True):
        parallel = compute.all_complexes(s.network, s.state)

    assert sorted(serial) == sorted(parallel)


def test_materialize_list_only_when_needed():
    with config.override(PROGRESS_BARS=False):
        engine = MapSquare(iter([1, 2, 3]))
        assert not isinstance(engine.iterable, list)

    with config.override(PROGRESS_BARS=True):
        engine = MapSquare(iter([1, 2, 3]))
        assert isinstance(engine.iterable, list)


def test_reconfigure_precision_on_change():
    with config.override(PRECISION=100):
        assert constants.EPSILON == 1e-100

    with config.override(PRECISION=3):
        assert constants.EPSILON == 1e-3

    with config.override(PRECISION=123):
        assert constants.EPSILON == 1e-123


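# The invariant exercised above is that `constants.EPSILON` tracks the
# PRECISION setting as 10 ** -PRECISION. The helper below is an illustrative
# sketch of that relationship only; it is not PyPhi's actual on-change hook.
def _epsilon_for(precision):
    """Comparison tolerance implied by a PRECISION value."""
    return float("1e-{}".format(precision))


assert _epsilon_for(100) == 1e-100
assert _epsilon_for(3) == 1e-3
assert _epsilon_for(123) == 1e-123

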
def test_ces_distance_switches_to_small_phi_difference(s):
    sia = compute.sia(s)
    ce_structures = (sia.ces, sia.partitioned_ces)

    with config.override(USE_SMALL_PHI_DIFFERENCE_FOR_CES_DISTANCE=False):
        assert compute.ces_distance(*ce_structures) == 2.3125

    with config.override(USE_SMALL_PHI_DIFFERENCE_FOR_CES_DISTANCE=True):
        assert compute.ces_distance(*ce_structures) == 1.083333


def test_parallel_and_sequential_ces_are_equal(s, micro_s, macro_s):
    with config.override(PARALLEL_CONCEPT_EVALUATION=False):
        c = compute.ces(s)
        c_micro = compute.ces(micro_s)
        c_macro = compute.ces(macro_s)

    with config.override(PARALLEL_CONCEPT_EVALUATION=True):
        assert set(c) == set(compute.ces(s))
        assert set(c_micro) == set(compute.ces(micro_s))
        assert set(c_macro) == set(compute.ces(macro_s))


def test_parallel_and_sequential_constellations_are_equal(s, micro_s, macro_s):
    with config.override(PARALLEL_CONCEPT_EVALUATION=False):
        c = compute.constellation(s)
        c_micro = compute.constellation(micro_s)
        c_macro = compute.constellation(macro_s)

    with config.override(PARALLEL_CONCEPT_EVALUATION=True):
        assert set(c) == set(compute.constellation(s))
        assert set(c_micro) == set(compute.constellation(micro_s))
        assert set(c_macro) == set(compute.constellation(macro_s))


def test_reconfigure_logging_on_change(capsys):
    log = logging.getLogger('pyphi.config')

    with config.override(LOG_STDOUT_LEVEL='WARNING'):
        log.warning('Just a warning, folks.')
    out, err = capsys.readouterr()
    assert 'Just a warning, folks.' in err

    with config.override(LOG_STDOUT_LEVEL='ERROR'):
        log.warning('Another warning.')
    out, err = capsys.readouterr()
    assert err == ''


def test_clear_subsystem_caches_after_computing_sia_config_option(s):
    with config.override(CLEAR_SUBSYSTEM_CACHES_AFTER_COMPUTING_SIA=False,
                         PARALLEL_CONCEPT_EVALUATION=False,
                         PARALLEL_CUT_EVALUATION=False,
                         CACHE_REPERTOIRES=True):
        sia = compute.sia(s)
        assert s._repertoire_cache.cache

    with config.override(CLEAR_SUBSYSTEM_CACHES_AFTER_COMPUTING_SIA=True,
                         PARALLEL_CONCEPT_EVALUATION=False,
                         PARALLEL_CUT_EVALUATION=False,
                         CACHE_REPERTOIRES=True):
        sia = compute.sia(s)
        assert not s._repertoire_cache.cache


def test_tripartitions_chooses_smallest_purview(s):
    mechanism = (1, 2)

    with config.override(PICK_SMALLEST_PURVIEW=False):
        mie = s.mie(mechanism)
        assert mie.phi == 0.5
        assert mie.purview == (0, 1)

    s.clear_caches()

    # In case of a phi-tie, choose the smaller purview (0,)
    with config.override(PICK_SMALLEST_PURVIEW=True):
        mie = s.mie(mechanism)
        assert mie.phi == 0.5
        assert mie.purview == (0,)


def test_override_config_is_a_context_manager():
    config.TEST_CONFIG = 1

    with config.override(TEST_CONFIG=1000):
        # Overridden
        assert config.TEST_CONFIG == 1000

    # Reverts to the original value
    assert config.TEST_CONFIG == 1


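# The save/override/restore behavior checked above can be sketched with a
# small context manager. This is a generic illustration of the pattern, not
# PyPhi's actual `config.override` implementation (which also works as a
# decorator); the `_ToyConfig` class is hypothetical.
import contextlib


class _ToyConfig:
    """Minimal stand-in for a config object with an override() manager."""

    @contextlib.contextmanager
    def override(self, **new_values):
        saved = {name: getattr(self, name) for name in new_values}
        try:
            for name, value in new_values.items():
                setattr(self, name, value)
            yield self
        finally:
            # Restore the original values even if the block raises.
            for name, value in saved.items():
                setattr(self, name, value)


_toy = _ToyConfig()
_toy.TEST_CONFIG = 1
with _toy.override(TEST_CONFIG=1000):
    assert _toy.TEST_CONFIG == 1000
assert _toy.TEST_CONFIG == 1

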
def test_cache_repertoires_config_option():
    with config.override(CACHE_REPERTOIRES=True):
        SomeObject = factory()
        o = SomeObject()
        assert o.cause_repertoire(1) == 'expensive computation'
        assert o.effect_repertoire(1) == 'expensive computation'
        expected_key = ('cause', 1)
        assert expected_key in o.repertoire_cache.cache
        expected_key = ('effect', 1)
        assert expected_key in o.repertoire_cache.cache

    with config.override(CACHE_REPERTOIRES=False):
        SomeObject = factory()
        o = SomeObject()
        assert o.cause_repertoire(1) == 'expensive computation'
        assert o.effect_repertoire(1) == 'expensive computation'
        # Repertoire cache should be empty
        assert not o.repertoire_cache.cache


def test_deserialization_memoizes_duplicate_objects(s):
    with config.override(PARALLEL_CUT_EVALUATION=True):
        sia = compute.sia(s)

    loaded = jsonify.loads(jsonify.dumps(sia))

    l1 = loaded.subsystem
    l2 = loaded.ces.subsystem
    assert l1 == l2
    assert hash(l1) == hash(l2)
    assert l1 is l2


def test_reconfigure_joblib_on_change(capsys):
    cachedir = "./__testing123__"
    try:
        with config.override(FS_CACHE_DIRECTORY=cachedir):
            assert constants.joblib_memory.location == cachedir
            assert Path(cachedir).exists()
    finally:
        shutil.rmtree(cachedir)

    def f(x):
        return x + 1

    with config.override(FS_CACHE_VERBOSITY=0):
        constants.joblib_memory.cache(f)(42)
        out, err = capsys.readouterr()
        assert len(out) == 0

    with config.override(FS_CACHE_VERBOSITY=100):
        constants.joblib_memory.cache(f)(42)
        out, err = capsys.readouterr()
        assert len(out) > 0


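# What the test above reconfigures is the shared joblib memoizer. The helper
# below is a hedged sketch of how such a cache could be rebuilt from the two
# settings; it is illustrative only, and PyPhi's actual reconfiguration hook
# in `constants` may differ.
import joblib


def _build_fs_cache(directory, verbosity):
    """Build an on-disk memoizer from FS_CACHE_DIRECTORY / FS_CACHE_VERBOSITY."""
    return joblib.Memory(location=directory, verbose=verbosity)


# Usage (illustrative): memory = _build_fs_cache("./__testing123__", 0)
#                       cached_f = memory.cache(f)

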
def test_network_init_validation(network):
    with pytest.raises(ValueError):
        # Totally wrong shape
        tpm = np.arange(3).astype(float)
        Network(tpm)

    with pytest.raises(ValueError):
        # Non-binary nodes (4 states)
        tpm = np.ones((4, 4, 4, 3)).astype(float)
        Network(tpm)

    # Conditionally dependent
    tpm = np.array([
        [1, 0.0, 0.0, 0],
        [0, 0.5, 0.5, 0],
        [0, 0.5, 0.5, 0],
        [0, 0.0, 0.0, 1],
    ])
    with config.override(VALIDATE_CONDITIONAL_INDEPENDENCE=False):
        Network(tpm)
    with config.override(VALIDATE_CONDITIONAL_INDEPENDENCE=True):
        with pytest.raises(exceptions.ConditionallyDependentError):
            Network(tpm)


def test_validate_tpm_conditional_independence():
    tpm = np.array([
        [1, 0.0, 0.0, 0],
        [0, 0.5, 0.5, 0],
        [0, 0.5, 0.5, 0],
        [0, 0.0, 0.0, 1],
    ])
    with pytest.raises(ValueError):
        validate.conditionally_independent(tpm)
    with config.override(VALIDATE_CONDITIONAL_INDEPENDENCE=False):
        validate.conditionally_independent(tpm)
    with pytest.raises(ValueError):
        validate.tpm(tpm)
    validate.tpm(tpm, check_independence=False)


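# For reference, "conditionally independent" here means that every row of the
# state-by-state TPM factorizes into a product of per-node marginals. The
# checker below is an illustrative reimplementation (assuming little-endian
# state ordering), not pyphi.validate's actual code.
def _is_conditionally_independent(tpm_ss):
    """Return True if each row of a state-by-state TPM factorizes."""
    n_states = tpm_ss.shape[1]
    n_nodes = int(np.log2(n_states))
    states = [tuple((s >> i) & 1 for i in range(n_nodes))
              for s in range(n_states)]
    for row in tpm_ss:
        # Per-node probability of being ON in the next state.
        marginals = [sum(p for p, st in zip(row, states) if st[i])
                     for i in range(n_nodes)]
        for p, st in zip(row, states):
            expected = np.prod([m if on else 1 - m
                                for on, m in zip(st, marginals)])
            if not np.isclose(p, expected):
                return False
    return True


# The TPM used in the tests above does not factorize:
assert not _is_conditionally_independent(np.array([
    [1, 0.0, 0.0, 0],
    [0, 0.5, 0.5, 0],
    [0, 0.5, 0.5, 0],
    [0, 0.0, 0.0, 1],
]))

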
def test_num_processes():
    # Can't have no processes
    with config.override(NUMBER_OF_CORES=0):
        with pytest.raises(ValueError):
            parallel.get_num_processes()

    # Negative numbers
    with config.override(NUMBER_OF_CORES=-1):
        assert parallel.get_num_processes() == 2

    # Too negative
    with config.override(NUMBER_OF_CORES=-3):
        with pytest.raises(ValueError):
            parallel.get_num_processes()

    # Requesting more cores than available
    with config.override(NUMBER_OF_CORES=3):
        assert parallel.get_num_processes() == 2

    # Ok
    with config.override(NUMBER_OF_CORES=1):
        assert parallel.get_num_processes() == 1


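# The convention pinned down above is joblib-style negative core counts: with
# `cpu_count` CPUs available, a negative setting n means cpu_count + n + 1,
# zero (or anything resolving below one) is invalid, and requests above
# cpu_count are clamped. The function below is an illustrative sketch assuming
# the two available cores the assertions imply; the real
# `parallel.get_num_processes` may differ in details.
def _resolve_num_processes(configured, cpu_count=2):
    if configured == 0:
        raise ValueError("Invalid number of processes: 0")
    if configured < 0:
        configured = cpu_count + configured + 1  # -1 means "all cores"
    if configured < 1:
        raise ValueError("Too few processes requested")
    return min(configured, cpu_count)


assert _resolve_num_processes(-1) == 2
assert _resolve_num_processes(3) == 2
assert _resolve_num_processes(1) == 1

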
def test_acria_ordering():
    assert acria() == acria()
    assert acria(alpha=0.0) < acria(alpha=1.0)
    assert acria(alpha=0.0, mechanism=(1, 2)) <= acria(alpha=1.0, mechanism=(1,))
    assert acria(alpha=0.0, mechanism=(1, 2)) > acria(alpha=0.0, mechanism=(1,))

    assert bool(acria(alpha=1.0)) is True
    assert bool(acria(alpha=0.0)) is False
    assert bool(acria(alpha=-1)) is False

    with pytest.raises(TypeError):
        acria(direction=Direction.CAUSE) < acria(direction=Direction.EFFECT)

    with config.override(PICK_SMALLEST_PURVIEW=True):
        assert acria(purview=(1,)) > acria(purview=(0, 2))


def test_deserialization_memoizes_duplicate_objects(s):
    with config.override(PARALLEL_CUT_EVALUATION=True):
        sia = compute.sia(s)

    s1 = sia.subsystem
    # Computed in a parallel process, so has a different id
    s2 = sia.ces[0].subsystem
    assert s1 is not s2
    assert s1 == s2
    assert hash(s1) == hash(s2)

    loaded = jsonify.loads(jsonify.dumps(sia))
    l1 = loaded.subsystem
    l2 = loaded.ces[0].subsystem
    assert l1 == l2
    assert hash(l1) == hash(l2)
    assert l1 is l2


def test_PQR_relations():
    with config.override(
        PARTITION_TYPE="TRI",
        MEASURE="BLD",
    ):
        PQR = examples.PQR()
        ces = compute.ces(PQR)
        separated_ces = list(relations.separate_ces(ces))
        results = list(relations.relations(PQR, ces))

        # NOTE: these phi values are in nats, not bits!
        answers = [
            [(0, 4), 0.6931471805599452, [(2,)]],
            [(0, 6), 0.6931471805599452, [(2,)]],
            [(1, 2), 0.3465735902799726, [(0,)]],
            [(1, 3), 0.3465735902799726, [(0,)]],
            [(1, 7), 0.3465735902799726, [(0,)]],
            [(2, 3), 0.3465735902799726, [(0,), (1,), (0, 1)]],
            [(2, 4), 0.3465735902799726, [(1,)]],
            [(2, 6), 0.3465735902799726, [(0,), (1,), (0, 1)]],
            [(2, 7), 0.3465735902799726, [(0,), (1,), (0, 1)]],
            [(3, 7), 0.693147180559945, [(0, 1)]],
            [(4, 6), 1.3862943611198901, [(1, 2)]],
            [(5, 7), 0.6931471805599452, [(2,)]],
            [(0, 4, 6), 0.6931471805599452, [(2,)]],
            [(1, 2, 3), 0.3465735902799726, [(0,)]],
            [(1, 2, 7), 0.3465735902799726, [(0,)]],
            [(1, 3, 7), 0.3465735902799726, [(0,)]],
            [(2, 3, 7), 0.3465735902799726, [(0,), (1,), (0, 1)]],
            [(2, 4, 6), 0.3465735902799726, [(1,)]],
            [(1, 2, 3, 7), 0.3465735902799726, [(0,)]],
        ]

        def base2(x):
            return x / np.log(2.0)

        for result, answer in zip(results, answers):
            subset, phi, purviews = answer
            subset = tuple(separated_ces[i] for i in subset)
            relata = relations.Relata(PQR, subset)
            assert set(purviews) == set(result.ties)
            assert utils.eq(base2(phi), result.phi)
            assert relata == result.relata


def test_ria_ordering_and_equality():
    assert ria(phi=1.0) < ria(phi=2.0)
    assert ria(phi=2.0) > ria(phi=1.0)
    assert ria(mechanism=(1,)) < ria(mechanism=(1, 2))
    assert ria(mechanism=(1, 2)) >= ria(mechanism=(1,))
    assert ria(purview=(1,)) < ria(purview=(1, 2))
    assert ria(purview=(1, 2)) >= ria(purview=(1,))

    assert ria(phi=1.0) == ria(phi=1.0)
    assert ria(phi=1.0) == ria(phi=(1.0 - constants.EPSILON / 2))
    assert ria(phi=1.0) != ria(phi=(1.0 - constants.EPSILON * 2))
    assert ria(direction=Direction.CAUSE) != ria(direction=Direction.EFFECT)
    assert ria(mechanism=(1,)) != ria(mechanism=(1, 2))

    with config.override(PICK_SMALLEST_PURVIEW=True):
        assert ria(purview=(1, 2)) < ria(purview=(1,))

    with pytest.raises(TypeError):
        ria(direction=Direction.CAUSE) < ria(direction=Direction.EFFECT)

    with pytest.raises(TypeError):
        ria(direction=Direction.CAUSE) >= ria(direction=Direction.EFFECT)


def test_maximal_states():
    with config.override(
        PARTITION_TYPE="TRI",
        MEASURE="BLD",
    ):
        subsystem = examples.PQR()
        ces = relations.separate_ces(compute.ces(subsystem))
        results = [relations.maximal_state(mice) for mice in ces]
        answers = [
            np.array([[0, 0, 0]]),
            np.array([[0, 0, 0]]),
            np.array([[0, 0, 0], [1, 1, 0]]),
            np.array([[0, 0, 0]]),
            np.array([[0, 1, 0]]),
            np.array([[0, 0, 1]]),
            np.array([[1, 1, 0]]),
            np.array([[0, 0, 1]]),
        ]
        for result, answer in zip(results, answers):
            print(result)
            print(answer)
            assert np.array_equal(result, answer)


def test_system_repertoire_distance_must_be_symmetric():
    a = np.ones((2, 2, 2)) / 8
    b = np.ones((2, 2, 2)) / 8
    with config.override(MEASURE='KLD'):
        with pytest.raises(ValueError):
            distance.system_repertoire_distance(a, b)


# Test MICE caching
# ========================

"""Note: all subsystems are loaded from `examples` internally instead of by
pytest fixture because they must be constructed with the correct cache config.
"""

try:
    redis_available = cache.RedisConn().ping()
except redis.exceptions.ConnectionError:
    redis_available = False

# Decorator to skip a test if Redis is not available
require_redis = pytest.mark.skipif(
    not redis_available, reason="requires a running Redis server")

# Decorator to force a test to use the local cache
local_cache = config.override(REDIS_CACHE=False)

# Decorator to force a test to use the Redis cache; skip the test if Redis is
# not available
redis_cache = lambda f: config.override(REDIS_CACHE=True)(require_redis(f))


def all_caches(test_func):
    """Decorator to run a test twice: once with the local cache and once with
    Redis.

    Any decorated test must add a `redis_cache` argument.
    """
    @pytest.mark.parametrize("redis_cache,", [
        require_redis((True,)),
        (False,),
    ])
    def wrapper(redis_cache, *args, **kwargs):
        with config.override(REDIS_CACHE=redis_cache[0]):
            return test_func(redis_cache, *args, **kwargs)

    return wrapper


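# For orientation, a test decorated with `all_caches` runs twice: once against
# Redis (skipped when no server is running) and once against the local cache.
# The body below is a hypothetical example; only the decorator wiring comes
# from this module, and `examples.basic_subsystem` / `compute.sia` are assumed
# to be importable as they are elsewhere in the suite.
@all_caches
def test_example_runs_against_both_caches(redis_cache):
    # Per the note above, build the subsystem inside the test so that it is
    # constructed under the overridden cache configuration.
    subsystem = examples.basic_subsystem()
    assert compute.sia(subsystem).phi >= 0

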
def test_ces_concepts_share_the_same_subsystem(parallel, s):
    with config.override(PARALLEL_CONCEPT_EVALUATION=parallel):
        ces = compute.ces(s)
        for concept in ces:
            assert concept.subsystem is ces.subsystem


def test_system_cut_styles(s):
    with config.override(SYSTEM_CUTS='3.0_STYLE'):
        assert compute.phi(s) == 2.3125

    with config.override(SYSTEM_CUTS='CONCEPT_STYLE'):
        assert compute.phi(s) == 0.6875


def test_basic_nor_or():
    # A system composed of NOR and OR (copy) gates, which mimics the basic
    # pyphi network
    nodes = 12
    tpm = np.zeros((2 ** nodes, nodes))

    for psi, ps in enumerate(utils.all_states(nodes)):
        cs = [0 for i in range(nodes)]
        if ps[5] == 0 and ps[11] == 0:
            cs[0] = 1
        if ps[0] == 0:
            cs[1] = 1
        if ps[1] == 1:
            cs[2] = 1
        if ps[11] == 0:
            cs[3] = 1
        if ps[3] == 0:
            cs[4] = 1
        if ps[4] == 1:
            cs[5] = 1
        if ps[2] == 0:
            cs[6] = 1
        if ps[5] == 0:
            cs[7] = 1
        if ps[6] == 0 and ps[7] == 0:
            cs[8] = 1
        if ps[2] == 0 and ps[5] == 0:
            cs[9] = 1
        if ps[9] == 1:
            cs[10] = 1
        if ps[8] == 0 and ps[10] == 0:
            cs[11] = 1
        tpm[psi, :] = cs

    cm = np.array([
        [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0],
        [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
        [1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
    ])

    state = (0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0)

    network = Network(tpm, cm=cm)

    # (0, 1, 2) compose the OR element,
    # (3, 4, 5) the COPY,
    # (6, 7, 8, 9, 10, 11) the XOR
    partition = ((0, 1, 2), (3, 4, 5), (6, 7, 8, 9, 10, 11))
    output = (2, 5, 11)
    blackbox = macro.Blackbox(partition, output)
    assert blackbox.hidden_indices == (0, 1, 3, 4, 6, 7, 8, 9, 10)

    time = 3
    sub = macro.MacroSubsystem(network, state, network.node_indices,
                               blackbox=blackbox, time_scale=time)

    with config.override(CUT_ONE_APPROXIMATION=True):
        sia = compute.sia(sub)

    assert sia.phi == 1.958332
    assert sia.cut == models.Cut((6,), (0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11))


# Test MICE caching
# ========================

# NOTE: All subsystems are loaded from `examples` internally instead of by
# pytest fixture because they must be constructed with the correct cache
# config.

redis_available = cache.redis_available()

# Decorator to skip a test if Redis is not available
require_redis = pytest.mark.skipif(
    not redis_available, reason="requires a running Redis server")

# Decorator to force a test to use the local cache
local_cache = config.override(REDIS_CACHE=False)

# Decorator to force a test to use Redis cache; skip test if Redis is not
# available
redis_cache = lambda f: config.override(REDIS_CACHE=True)(require_redis(f))


def all_caches(test_func):
    """Decorator to run a test twice: once with the local cache and once with
    Redis.

    Any decorated test must add a `redis_cache` argument.
    """
    @pytest.mark.parametrize("redis_cache,", [
        require_redis((True,)),
        (False,),