def test_apply_cut(s):
    cut = Cut((0, 1), (2,))
    cut_s = s.apply_cut(cut)
    assert s.network == cut_s.network
    assert s.state == cut_s.state
    assert s.node_indices == cut_s.node_indices
    assert np.array_equal(cut_s.tpm, s.tpm)
    assert np.array_equal(cut_s.cm, cut.apply_cut(s.cm))
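# The assertion on `cut.apply_cut(s.cm)` above relies on the cut severing the
# connections from its first part to its second part in the connectivity
# matrix. A minimal sketch of that behaviour, assuming positional indexing of
# the cut's two parts (the helper name `sever_connections` is hypothetical,
# not part of pyphi):
def sever_connections(cut, cm):
    """Return a copy of the connectivity matrix with the severed -> intact
    connections zeroed out."""
    cm = cm.copy()
    for i in cut[0]:
        for j in cut[1]:
            cm[i][j] = 0
    return cm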
def test_validate_cut_nodes_equal_subsystem_nodes(s):
    assert s.node_indices == (0, 1, 2)

    cut = Cut((0,), (1, 2))  # A-ok
    Subsystem(s.network, s.state, s.node_indices, cut=cut)

    cut = Cut((0,), (1,))  # missing node 2 in cut
    with pytest.raises(ValueError):
        Subsystem(s.network, s.state, s.node_indices, cut=cut)

    cut = Cut((0,), (1, 2))  # missing node 2 in subsystem
    with pytest.raises(ValueError):
        Subsystem(s.network, s.state, (0, 1), cut=cut)
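# A minimal sketch of the invariant exercised above, assuming the validation
# only needs set equality between the cut's two parts and the subsystem's
# node indices (the helper name is hypothetical, not pyphi's validation code):
def cut_covers_indices(cut, node_indices):
    """True iff the cut's parts together are exactly the subsystem's nodes."""
    return set(cut[0]) | set(cut[1]) == set(node_indices)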
def test_find_cut_matrix(s, big_subsys_0_thru_3):
    cut = Cut((0,), (1, 2))
    cut_s = Subsystem(s.node_indices, s.network, cut=cut,
                      mice_cache=s._mice_cache)
    answer_s = np.array([
        [0, 1, 1],
        [0, 0, 0],
        [0, 0, 0]])
    cut = Cut((0, 1), (2, 3))
    cut_big = Subsystem(big_subsys_0_thru_3.node_indices,
                        big_subsys_0_thru_3.network, cut=cut,
                        mice_cache=big_subsys_0_thru_3._mice_cache)
    answer_big = np.array([
        [0, 0, 1, 1],
        [0, 0, 1, 1],
        [0, 0, 0, 0],
        [0, 0, 0, 0]])
    assert np.array_equal(cut_s.cut_matrix, answer_s)
    assert np.array_equal(cut_big.cut_matrix, answer_big)
def test_find_mip_parallel_standard_example(s, flushcache, restore_fs_cache):
    flushcache()
    unpartitioned_constellation = constellation(s)
    bipartitions = utils.directed_bipartition(s.node_indices)[1:-1]
    cuts = [Cut(bipartition[0], bipartition[1])
            for bipartition in bipartitions]
    min_mip = _null_bigmip(s)
    min_mip.phi = float('inf')
    mip = _find_mip_parallel(s, cuts, unpartitioned_constellation, min_mip)
    check_mip(mip, standard_answer)
def test_find_mip_sequential_micro(micro_s, flushcache, restore_fs_cache):
    flushcache()
    unpartitioned_constellation = constellation(micro_s)
    bipartitions = utils.directed_bipartition(micro_s.node_indices)[1:-1]
    cuts = [Cut(bipartition[0], bipartition[1])
            for bipartition in bipartitions]
    min_mip = _null_bigmip(micro_s)
    min_mip.phi = float('inf')
    mip = _find_mip_sequential(micro_s, cuts, unpartitioned_constellation,
                               min_mip)
    check_mip(mip, micro_answer)
def test_find_cut_matrix(s, big_subsys_0_thru_3):
    cut = Cut((0,), (1, 2))
    cut_s = Subsystem(s.network, s.state, s.node_indices, cut=cut)
    answer_s = np.array([
        [0, 1, 1],
        [0, 0, 0],
        [0, 0, 0]])
    assert np.array_equal(cut_s.cut_matrix, answer_s)

    cut = Cut((0, 1), (2, 3))
    cut_big = Subsystem(big_subsys_0_thru_3.network,
                        big_subsys_0_thru_3.state,
                        big_subsys_0_thru_3.node_indices, cut=cut)
    answer_big = np.array([
        [0, 0, 1, 1],
        [0, 0, 1, 1],
        [0, 0, 0, 0],
        [0, 0, 0, 0]])
    assert np.array_equal(cut_big.cut_matrix, answer_big)

    null_cut_matrix = np.array([
        [0, 0, 0],
        [0, 0, 0],
        [0, 0, 0],
    ])
    assert np.array_equal(s.cut_matrix, null_cut_matrix)
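# The expected arrays above follow a "severed rows x intact columns" pattern.
# A minimal sketch that reproduces them with an outer product of indicator
# vectors (the helper name is hypothetical, not pyphi's implementation):
import numpy as np


def cut_matrix_sketch(severed, intact, n):
    """Indicator matrix of the connections severed by Cut(severed, intact)."""
    from_part, to_part = np.zeros(n), np.zeros(n)
    from_part[list(severed)] = 1
    to_part[list(intact)] = 1
    return np.outer(from_part, to_part)

# cut_matrix_sketch((0,), (1, 2), 3) matches answer_s above;
# cut_matrix_sketch((0, 1), (2, 3), 4) matches answer_big.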
def test_find_mip_parallel_standard_example(s, flushcache, restore_fs_cache):
    flushcache()
    initial = (config.PARALLEL_CUT_EVALUATION, config.NUMBER_OF_CORES)
    config.PARALLEL_CUT_EVALUATION, config.NUMBER_OF_CORES = True, -2

    unpartitioned_constellation = constellation(s)
    bipartitions = utils.directed_bipartition(s.node_indices)[1:-1]
    cuts = [Cut(bipartition[0], bipartition[1])
            for bipartition in bipartitions]
    min_mip = _null_bigmip(s)
    min_mip.phi = float('inf')
    mip = _find_mip_parallel(s, cuts, unpartitioned_constellation, min_mip)
    check_mip(mip, standard_answer)

    config.PARALLEL_CUT_EVALUATION, config.NUMBER_OF_CORES = initial
def test_find_mip_sequential_noised_example(s_noised, flushcache,
                                            restore_fs_cache):
    flushcache()
    initial = config.PARALLEL_CUT_EVALUATION
    config.PARALLEL_CUT_EVALUATION = False

    unpartitioned_constellation = constellation(s_noised)
    bipartitions = utils.directed_bipartition(s_noised.node_indices)[1:-1]
    cuts = [Cut(bipartition[0], bipartition[1])
            for bipartition in bipartitions]
    min_mip = _null_bigmip(s_noised)
    min_mip.phi = float('inf')
    mip = _find_mip_sequential(s_noised, cuts, unpartitioned_constellation,
                               min_mip)
    check_mip(mip, noised_answer)

    config.PARALLEL_CUT_EVALUATION = initial
def test_find_mip_parallel_micro(micro_s, flushcache, restore_fs_cache):
    flushcache()
    initial = config.PARALLEL_CUT_EVALUATION
    config.PARALLEL_CUT_EVALUATION = True

    unpartitioned_constellation = constellation(micro_s)
    bipartitions = utils.directed_bipartition(micro_s.node_indices)[1:-1]
    cuts = [Cut(bipartition[0], bipartition[1])
            for bipartition in bipartitions]
    min_mip = _null_bigmip(micro_s)
    min_mip.phi = float('inf')
    mip = _find_mip_parallel(micro_s, cuts, unpartitioned_constellation,
                             min_mip)
    check_mip(mip, micro_answer)

    config.PARALLEL_CUT_EVALUATION = initial
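# The three tests above save, mutate, and restore config values by hand. A
# minimal sketch of a context manager expressing the same save-and-restore
# pattern (the name `override_config` is hypothetical, not part of pyphi):
from contextlib import contextmanager

from pyphi import config


@contextmanager
def override_config(**overrides):
    """Temporarily set pyphi config attributes, restoring them on exit."""
    saved = {name: getattr(config, name) for name in overrides}
    try:
        for name, value in overrides.items():
            setattr(config, name, value)
        yield
    finally:
        for name, value in saved.items():
            setattr(config, name, value)

# Usage sketch:
# with override_config(PARALLEL_CUT_EVALUATION=True, NUMBER_OF_CORES=-2):
#     ...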
def big_mip_bipartitions(nodes):
    """Return all |big_phi| cuts for the given nodes.

    The list returned depends on `config.CUT_ONE_APPROXIMATION`.

    Args:
        nodes (tuple(int)): The node indices to partition.

    Returns:
        list(Cut): All unidirectional partitions.
    """
    if config.CUT_ONE_APPROXIMATION:
        bipartitions = utils.directed_bipartition_of_one(nodes)
    else:
        # Skip the first and last (trivial, null-cut) bipartitions.
        bipartitions = utils.directed_bipartition(nodes)[1:-1]

    return [Cut(bipartition[0], bipartition[1])
            for bipartition in bipartitions]
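# `big_mip_bipartitions` defers to `utils.directed_bipartition`. For intuition
# only, here is an illustrative stand-in that enumerates every ordered split
# of the nodes into two complementary parts; the real pyphi helper may order
# its results differently. The sketch shows why slicing with [1:-1] drops
# exactly the two trivial null cuts.
from itertools import chain, combinations


def directed_bipartition_sketch(nodes):
    """All ordered splits of `nodes` into two complementary (possibly empty)
    parts, the first being ((), nodes) and the last (nodes, ())."""
    subsets = chain.from_iterable(
        combinations(nodes, r) for r in range(len(nodes) + 1))
    return [(part, tuple(n for n in nodes if n not in part))
            for part in subsets]

# directed_bipartition_sketch((0, 1, 2)) has 2**3 = 8 entries; slicing [1:-1]
# removes ((), (0, 1, 2)) and ((0, 1, 2), ()).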
import pytest
from itertools import chain

from pyphi import Subsystem
from pyphi.models import Mice, Cut
from pyphi.utils import phi_eq

import example_networks

# Expected results {{{
# ====================

s = example_networks.s()

directions = ('past', 'future')
cuts = (None, Cut((1, 2), (0,)))
subsystem = {
    cut: Subsystem(s.node_indices, s.network, cut=cut)
    for cut in cuts
}

expected_purview_indices = {
    cuts[0]: {
        'past': {
            (1,): (2,),
            (2,): (0, 1),
            (0, 1): (1, 2),
            (0, 1, 2): (0, 1, 2)},
        'future': {
            (1,): (0,),
        [2],
        np.array([0.5, 0.5]).reshape(1, 1, 2, order="F")
    ),
    (
        'cause_repertoire',
        standard_subsystem,
        [1],
        [],
        np.array([1])
    ),
    # }}}
    # Full network, with cut {{{
    # --------------------------
    (
        'cause_repertoire',
        Subsystem(standard, standard_subsystem.state, full,
                  cut=Cut((2,), (0, 1))),
        [0],
        [1],
        np.array([1 / 3, 2 / 3]).reshape(1, 2, 1, order="F")
    ),
    # }}}
    # Subset, with cut {{{
    # --------------------
    (
        'cause_repertoire',
        Subsystem(standard, standard_subsystem.state, (1, 2),
                  cut=Cut((1,), (2,))),
        [2],
        [1, 2],
        np.array([0.25, 0.25, 0.25, 0.25]).reshape(1, 2, 2, order="F")
    ),
    (
def test_cut_mechanisms(s):
    assert list(s.cut_mechanisms) == []
    assert list(s.apply_cut(Cut((0, 1), (2,))).cut_mechanisms) == [
        (0, 2), (1, 2), (0, 1, 2)]
def test_cut_mechanisms(s):
    assert s.cut_mechanisms == ()
    assert s.apply_cut(Cut((0, 1), (2,))).cut_mechanisms == (
        (0, 2), (1, 2), (0, 1, 2))
def test_is_cut(s):
    assert s.is_cut is False
    s = Subsystem(s.network, s.state, s.node_indices, cut=Cut((0,), (1, 2)))
    assert s.is_cut is True
     np.array([0.5, 0.5]).reshape(2, 1, 1, order="F")),
    ('cause_repertoire', standard_subsystem, [0, 1], [0, 2],
     np.array([0.5, 0.5, 0.0, 0.0]).reshape(2, 1, 2, order="F")),
    ('cause_repertoire', standard_subsystem, [1], [2],
     np.array([1.0, 0.0]).reshape(1, 1, 2, order="F")),
    ('cause_repertoire', standard_subsystem, [], [2],
     np.array([0.5, 0.5]).reshape(1, 1, 2, order="F")),
    ('cause_repertoire', standard_subsystem, [1], [],
     np.array([1])),
    # }}}
    # Full network, with cut {{{
    # --------------------------
    ('cause_repertoire',
     Subsystem(standard, standard_subsystem.state, full,
               cut=Cut((2,), (0, 1))),
     [0], [1],
     np.array([1 / 3, 2 / 3]).reshape(1, 2, 1, order="F")),
    # }}}
    # Subset, with cut {{{
    # --------------------
    ('cause_repertoire',
     Subsystem(standard, standard_subsystem.state, (1, 2),
               cut=Cut((1,), (2,))),
     [2], [1, 2],
     np.array([0.25, 0.25, 0.25, 0.25]).reshape(1, 2, 2, order="F")),
    ('cause_repertoire',
     Subsystem(standard, standard_subsystem.state, (1, 2),
               cut=Cut((1,), (2,))),
     [2], [2],
     np.array([0.5, 0.5]).reshape(1,
    ('cause_repertoire', Subsystem(full, standard, cut=None), [0], [0],
     np.array([0.5, 0.5]).reshape(2, 1, 1, order="F")),
    ('cause_repertoire', Subsystem(full, standard, cut=None), [0, 1], [0, 2],
     np.array([0.5, 0.5, 0.0, 0.0]).reshape(2, 1, 2, order="F")),
    ('cause_repertoire', Subsystem(full, standard, cut=None), [1], [2],
     np.array([1.0, 0.0]).reshape(1, 1, 2, order="F")),
    ('cause_repertoire', Subsystem(full, standard, cut=None), [], [2],
     np.array([0.5, 0.5]).reshape(1, 1, 2, order="F")),
    ('cause_repertoire', Subsystem(full, standard, cut=None), [1], [],
     np.array([1])),
    # }}}
    # Full network, with cut {{{
    # --------------------------
    ('cause_repertoire', Subsystem(full, standard, cut=Cut((2,), (0, 1))),
     [0], [1],
     np.array([1 / 3, 2 / 3]).reshape(1, 2, 1, order="F")),
    # }}}
    # Subset, with cut {{{
    # --------------------
    ('cause_repertoire', Subsystem((1, 2), standard, cut=Cut((1,), (2,))),
     [2], [1, 2],
     np.array([0.25, 0.25, 0.25, 0.25]).reshape(1, 2, 2, order="F")),
    ('cause_repertoire', Subsystem((1, 2), standard, cut=Cut((1,), (2,))),
     [2], [2],
     np.array([0.5, 0.5]).reshape(1,