Example #1
    def make_cpmf_PrXcZ(self, X, Z, PrZ=None):
        # Build the conditional PMF Pr(X | Z) from the joint PMF Pr(X, Z),
        # reusing a precomputed Pr(Z) when one is supplied.
        if PrZ is None:
            PrZ = self.AD_tree.make_pmf(list(Z))

        unsorted_variables = [X] + Z
        joint_variables = sorted(unsorted_variables)
        index = {var: position for position, var in enumerate(joint_variables)}

        PrXZ = self.AD_tree.make_pmf(joint_variables)

        PrXcZ = CPMF(None, None)

        for joint_key, joint_p in PrXZ.items():
            # Split each joint key into the values of Z and the value of X.
            zkey = tuple(joint_key[index[zvar]] for zvar in Z)
            varkey = joint_key[index[X]]
            if len(zkey) == 1:
                zkey = zkey[0]
            try:
                pmf = PrXcZ.conditional_probabilities[zkey]
            except KeyError:
                pmf = PMF(None)
                PrXcZ.conditional_probabilities[zkey] = pmf
            try:
                # Pr(X=x | Z=z) = Pr(X=x, Z=z) / Pr(Z=z)
                pmf.probabilities[varkey] = joint_p / PrZ.p(zkey)
            except ZeroDivisionError:
                pass

        return (PrXcZ, PrXZ)
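The method above applies the identity Pr(X | Z) = Pr(X, Z) / Pr(Z) key by key. As a minimal standalone sketch of that conditioning step, using plain dictionaries instead of the PMF/CPMF classes (all names and numbers below are illustrative only):

from collections import defaultdict

# Hypothetical joint distribution over (x, z) and marginal over z.
joint_xz = {(0, 0): 0.25, (1, 0): 0.25, (0, 1): 0.10, (1, 1): 0.40}
marginal_z = {0: 0.50, 1: 0.50}

conditional = defaultdict(dict)          # conditional[z][x] == Pr(X=x | Z=z)
for (x, z), p_xz in joint_xz.items():
    if marginal_z[z] > 0:                # mirrors the ZeroDivisionError guard above
        conditional[z][x] = p_xz / marginal_z[z]

assert abs(sum(conditional[0].values()) - 1.0) < 1e-9
assert abs(sum(conditional[1].values()) - 1.0) < 1e-9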
Example #2
def test_joint_variables_pmf():
    animals = Variable(['cat', 'dog', 'cat', 'mouse', 'dog', 'cat'])
    animals.ID = 3
    animals.name = 'animals'

    colors = Variable(['gray', 'yellow', 'brown', 'silver', 'white', 'gray'])
    colors.ID = 2
    colors.name = 'colors'

    sizes = Variable(['small', 'small', 'large', 'small', 'normal', 'small'])
    sizes.ID = 1
    sizes.name = 'sizes'

    fauna = JointVariables(sizes, colors, animals)
    fauna.update_values()
    assert [1, 2, 3] == fauna.variableIDs
    assert fauna.variables[0] is sizes
    assert fauna.variables[1] is colors
    assert fauna.variables[2] is animals

    expected_values = [('large', 'brown', 'cat'),
                       ('normal', 'white', 'dog'),
                       ('small', 'gray', 'cat'),
                       ('small', 'silver', 'mouse'),
                       ('small', 'yellow', 'dog')]
    assert fauna.values == expected_values

    PrFauna = PMF(fauna)
    assert PrFauna.p('small', 'gray', 'cat') == 2 / 6
    assert PrFauna.p('small', 'silver', 'mouse') == 1 / 6
    assert PrFauna.p('small', 'silver', 'dog') == 0

    singleton_joint = JointVariables(animals)
    assert ['cat', 'dog', 'cat', 'mouse', 'dog', 'cat'] == singleton_joint.instances()
Example #3
def test_conditional_pmf__multiple_values():
    sizes = Variable(['small', 'small', 'large', 'small', 'normal', 'small'])
    sizes.ID = 1
    sizes.name = 'sizes'

    colors = Variable(['gray', 'yellow', 'brown', 'silver', 'white', 'gray'])
    colors.ID = 2
    colors.name = 'colors'

    animals = Variable(['cat', 'dog', 'cat', 'snake', 'dog', 'cat'])
    animals.ID = 3
    animals.name = 'animals'

    is_pet = Variable(['yes', 'yes', 'yes', 'maybe', 'yes', 'yes'])
    is_pet.ID = 4
    is_pet.name = 'is_pet'

    Pr = CPMF(JointVariables(colors, is_pet), JointVariables(sizes, animals))

    assert Pr.given('small', 'cat').p('gray', 'yes') == 2 / 2
    assert Pr.given('small', 'cat').p('yellow', 'yes') == 0 / 1
    assert Pr.given('small', 'cat').p('brown', 'maybe') == 0 / 1

    assert Pr.given('small', 'dog').p('yellow', 'yes') == 1 / 1
    assert Pr.given('small', 'dog').p('yellow', 'maybe') == 0 / 1
    assert Pr.given('small', 'dog').p('silver', 'maybe') == 0 / 1

    assert Pr.given('large', 'cat').p('brown', 'yes') == 1 / 1
    assert Pr.given('large', 'cat').p('yellow', 'yes') == 0 / 1

    assert Pr.given('small', 'snake').p('silver', 'maybe') == 1 / 1
    assert Pr.given('small', 'snake').p('silver', 'no') == 0 / 1

    assert Pr.given('normal', 'dog').p('white', 'yes') == 1 / 1
    assert Pr.given('normal', 'dog').p('silver', 'yes') == 0 / 1
    assert Pr.given('normal', 'dog').p('yellow', 'maybe') == 0 / 1

    SA = JointVariables(sizes, animals)
    PrAll = CPMF(JointVariables(colors, is_pet), SA)
    PrSA = PMF(SA)
    PrCcSA = CPMF(colors, SA)
    PrIPcSA = CPMF(is_pet, SA)

    test_p_all = 0.0
    test_p_c = 0.0
    test_p_ip = 0.0

    for (sa, psa) in PrSA.items():
        for (c, pcsa) in PrCcSA.given(sa).items():
            test_p_c += pcsa * PrSA.p(sa)
            for (ip, pipsa) in PrIPcSA.given(sa).items():
                pall = PrAll.given(sa).p(c, ip)
                test_p_all += pall * PrSA.p(sa)
                test_p_ip += pipsa * PrSA.p(sa)

    assert almostEqual(1, test_p_all)
    assert almostEqual(1, test_p_c)
    assert almostEqual(1, test_p_ip)
Example #4
    def make_pmf(self, variables):
        # Build a joint PMF over the given variables by normalizing the
        # contingency table computed at the root node of the tree.
        variables = sorted(variables)
        joint_ct = self.root.make_contingency_table(self, variables)

        pmf = PMF(None)
        total_count = 1.0 * self.root.count
        for key, count in joint_ct.items():
            pmf.probabilities[key] = count / total_count

        pmf.variable = JointVariablesIDs(variables)

        return pmf
Example #5
def mutual_information(
    PrXY: PMF,
    PrX: PMF,
    PrY: PMF,
    base=2,
) -> float:
    # I(X; Y) = sum over x, y of p(x, y) * log(p(x, y) / (p(x) * p(y))),
    # skipping any term in which one of the probabilities is zero.
    logarithm = create_logarithm_function(base)
    MI = 0.0
    for (x, px) in PrX.items():
        for (y, py) in PrY.items():
            pxy = PrXY.p(x, y)
            if pxy == 0 or px == 0 or py == 0:
                continue
            pMI = pxy * logarithm(pxy / (px * py))
            MI += pMI
    return MI
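For reference, the formula implemented above is I(X; Y) = Σ_{x,y} p(x,y) · log(p(x,y) / (p(x) p(y))). A self-contained sketch with plain dictionaries and made-up numbers (not part of the library API) confirms the two boundary cases:

import math

# Hypothetical binary distributions, chosen so the result is known exactly.
px = {0: 0.5, 1: 0.5}
py = {0: 0.5, 1: 0.5}

pxy = {(0, 0): 0.25, (0, 1): 0.25, (1, 0): 0.25, (1, 1): 0.25}   # X and Y independent
mi = sum(p * math.log2(p / (px[x] * py[y]))
         for (x, y), p in pxy.items() if p > 0)
assert abs(mi) < 1e-9               # independence gives zero mutual information

pxy_dep = {(0, 0): 0.5, (1, 1): 0.5}                             # X always equals Y
mi_dep = sum(p * math.log2(p / (px[x] * py[y]))
             for (x, y), p in pxy_dep.items() if p > 0)
assert abs(mi_dep - 1.0) < 1e-9     # a fully dependent binary pair carries 1 bit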
Example #6
    def get_joint_entropy_term(self, *variables):
        # Return the joint entropy of the given variables, caching results
        # in self.JHT; on a cache miss, the entropy is computed from the
        # joint PMF as the expected value of -log p.
        jht_key = self.create_flat_variable_set(*variables)
        if len(jht_key) == 0:
            return 0

        self.JHT_reads += 1

        try:
            H = self.JHT[jht_key]
        except KeyError:
            self.JHT_misses += 1
            joint_variables = self.datasetmatrix.get_variables('X', jht_key)
            pmf = PMF(joint_variables)
            H = - pmf.expected_value(lambda v, p: math.log(p))
            self.JHT[jht_key] = H
            if self.DoF_calculator.requires_pmfs:
                self.DoF_calculator.set_context_pmfs(pmf, None, None, None)

        return H
Example #7
def assert_pmf_adtree_vs_datasetmatrix(ds, adtree, variables):
    dm = ds.datasetmatrix
    if isinstance(variables, int):
        variables = [variables]

    calculated_pmf = adtree.make_pmf(variables)
    calculated_pmf.remove_zeros()

    variables = dm.get_variables('X', variables)
    expected_pmf = PMF(variables)

    assert expected_pmf.probabilities == calculated_pmf.probabilities
Example #8
def calculate_pmf_for_cmi(
    X: Variable,
    Y: Variable,
    Z: Union[Variable, JointVariables],
) -> tuple[CPMF, CPMF, CPMF, PMF]:

    PrXYcZ = CPMF(JointVariables(X, Y), Z)
    PrXcZ = CPMF(X, Z)
    PrYcZ = CPMF(Y, Z)
    PrZ = PMF(Z)

    return (PrXYcZ, PrXcZ, PrYcZ, PrZ)
Example #9
def test_pmf_remove_from_key() -> None:
    pmf = PMF(None)
    assert pmf.remove_from_key(('A', 'B', 'C', 'D'), 2) == ('A', 'B', 'D')
    assert pmf.remove_from_key(('A', 'B', 'C', 'D'), 0) == ('B', 'C', 'D')
    assert pmf.remove_from_key(('A', 'B', 'C', 'D'), 3) == ('A', 'B', 'C')
    assert pmf.remove_from_key(('A', 'B'), 0) == ('B',)
    assert pmf.remove_from_key(('A', 'B'), 1) == ('A',)
    assert pmf.remove_from_key(('A',), 0) == tuple()
Example #10
def test_pmf_expected_values():
    animals = Variable(['cat', 'dog', 'cat', 'mouse', 'dog', 'cat', 'cat', 'dog'])
    PrAnimals = PMF(animals)

    assert almostEqual(1.0, PrAnimals.expected_value(lambda v, p: 1))

    # Test calculations of base-e entropy and base-2 entropy.
    assert almostEqual(0.97431475, (-1) * PrAnimals.expected_value(lambda v, p: math.log(p)))
    assert almostEqual(1.40563906, (-1) * PrAnimals.expected_value(lambda v, p: math.log2(p)))

    # Expected word length.
    assert PrAnimals.expected_value(lambda v, p: len(v)) == 3.25
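The constants asserted above follow directly from the distribution Pr(cat) = 4/8, Pr(dog) = 3/8, Pr(mouse) = 1/8. A standalone arithmetic check, using only the math module and no PMF class:

import math

probs = {'cat': 4 / 8, 'dog': 3 / 8, 'mouse': 1 / 8}

entropy_nats = -sum(p * math.log(p) for p in probs.values())
entropy_bits = -sum(p * math.log2(p) for p in probs.values())
expected_word_length = sum(p * len(v) for v, p in probs.items())

assert abs(entropy_nats - 0.97431475) < 1e-6
assert abs(entropy_bits - 1.40563906) < 1e-6
assert expected_word_length == 3.25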
Example #11
def test_make_cpmf_PrXcZ_variant_1() -> None:
    V0 = Variable([0, 1, 1, 1, 0, 1, 0, 1])
    V1 = Variable([0, 0, 1, 1, 0, 1, 1, 1])

    PrXZ = PMF(JointVariables(V0, V1))
    PrXZ.IDs(1000, 1111)

    assert PrXZ.IDs() == (1000, 1111)

    assert PrXZ.p((0, 0)) == 2 / 8
    assert PrXZ.p((0, 1)) == 1 / 8
    assert PrXZ.p((1, 0)) == 1 / 8
    assert PrXZ.p((1, 1)) == 4 / 8
Example #12
def conditional_mutual_information(
    PrXYcZ: CPMF,
    PrXcZ: CPMF,
    PrYcZ: CPMF,
    PrZ: PMF,
    base: Union[float, str] = 2,
) -> float:
    # I(X; Y | Z) = sum over z of p(z) * sum over x, y of
    # p(x, y | z) * log(p(x, y | z) / (p(x | z) * p(y | z))),
    # skipping zero-probability terms; the absolute value keeps the
    # returned quantity non-negative.
    logarithm = create_logarithm_function(base)
    cMI = 0.0
    for (z, pz) in PrZ.items():
        for (x, pxcz) in PrXcZ.given(z).items():
            for (y, pycz) in PrYcZ.given(z).items():
                pxycz = PrXYcZ.given(z).p(x, y)
                if pxycz == 0 or pxcz == 0 or pycz == 0:
                    continue
                pcMI = pz * pxycz * logarithm(pxycz / (pxcz * pycz))
                cMI += pcMI
    return abs(cMI)
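The conditional form implemented above is I(X; Y | Z) = Σ_z p(z) Σ_{x,y} p(x,y|z) · log(p(x,y|z) / (p(x|z) p(y|z))). A self-contained sketch with nested plain dictionaries and illustrative numbers, independent of the PMF/CPMF classes:

import math

pz = {0: 0.5, 1: 0.5}
# Within each value of Z, X and Y are chosen to be independent,
# so the conditional mutual information must come out as zero.
pxy_cz = {
    0: {(0, 0): 0.25, (0, 1): 0.25, (1, 0): 0.25, (1, 1): 0.25},
    1: {(0, 0): 0.81, (0, 1): 0.09, (1, 0): 0.09, (1, 1): 0.01},
}
px_cz = {0: {0: 0.5, 1: 0.5}, 1: {0: 0.9, 1: 0.1}}
py_cz = {0: {0: 0.5, 1: 0.5}, 1: {0: 0.9, 1: 0.1}}

cmi = 0.0
for z, p_z in pz.items():
    for (x, y), p_xy in pxy_cz[z].items():
        if p_xy > 0:
            cmi += p_z * p_xy * math.log2(p_xy / (px_cz[z][x] * py_cz[z][y]))

assert abs(cmi) < 1e-9   # conditional independence gives zero conditional MI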
Example #13
def test_single_variable_pmf():
    variable = Variable(numpy.array([3, 5, 1, 1, 4, 3, 7, 0, 2, 1, 0, 5, 4, 7, 2, 4]))
    variable.ID = 1
    variable.name = 'test_variable_1'

    variable.update_values()
    assert [0, 1, 2, 3, 4, 5, 7] == variable.values

    PrVariable = PMF(variable)
    expected_counts = {0: 2,
                       1: 3,
                       2: 2,
                       3: 2,
                       4: 3,
                       5: 2,
                       7: 2}
    assert PrVariable.value_counts == expected_counts

    expected_counts = {0: 2 / 16,
                       1: 3 / 16,
                       2: 2 / 16,
                       3: 2 / 16,
                       4: 3 / 16,
                       5: 2 / 16,
                       7: 2 / 16}
    assert PrVariable.probabilities == expected_counts

    assert 1 == sum(PrVariable.values())

    assert 2 / 16 == PrVariable.p(3)
    assert 2 / 16 == PrVariable.p(2)
    assert 2 / 16 == PrVariable.p(5)

    ev = 0
    for (v, pv) in PrVariable.items():
        ev += pv * v

    assert 3.0625 == ev
Example #14
def test_conditional_pmf__from_bayesian_network():
    configuration = dict()
    configuration['sourcepath'] = testutil.bif_folder / 'survey.bif'
    configuration['sample_count'] = int(4e4)
    # Using a random seed of 42 somehow requires 2e6 samples to pass, but
    # with the seed 1984, it is sufficient to generate only 4e4. Maybe the
    # random generator is biased somehow?
    configuration['random_seed'] = 1984
    configuration['values_as_indices'] = False
    configuration['objectives'] = ['R', 'TRN']

    bayesian_network = BayesianNetwork.from_bif_file(configuration['sourcepath'], use_cache=False)
    bayesian_network.finalize()

    sbnds = SampledBayesianNetworkDatasetSource(configuration)
    sbnds.reset_random_seed = True
    datasetmatrix = sbnds.create_dataset_matrix('test_sbnds')

    assert ['AGE', 'EDU', 'OCC', 'SEX'] == datasetmatrix.column_labels_X
    assert ['R', 'TRN'] == datasetmatrix.column_labels_Y

    AGE = Variable(datasetmatrix.get_column_by_label('X', 'AGE'))
    PrAge = PMF(AGE)

    SEX = Variable(datasetmatrix.get_column_by_label('X', 'SEX'))
    PrSex = PMF(SEX)

    assert_PMF_AlmostEquals_BNProbDist(
        bayesian_network.variable_nodes['AGE'].probdist,
        PrAge)

    assert_PMF_AlmostEquals_BNProbDist(
        bayesian_network.variable_nodes['SEX'].probdist,
        PrSex)

    EDU = Variable(datasetmatrix.get_column_by_label('X', 'EDU'))
    PrEdu = CPMF(EDU, given=JointVariables(AGE, SEX))

    assert_CPMF_AlmostEquals_BNProbDist(
        bayesian_network.variable_nodes['EDU'].probdist,
        PrEdu)

    OCC = Variable(datasetmatrix.get_column_by_label('X', 'OCC'))
    PrOcc = CPMF(OCC, given=EDU)

    assert_CPMF_AlmostEquals_BNProbDist(
        bayesian_network.variable_nodes['OCC'].probdist,
        PrOcc)

    R = Variable(datasetmatrix.get_column_by_label('Y', 'R'))
    PrR = CPMF(R, given=EDU)

    assert_CPMF_AlmostEquals_BNProbDist(
        bayesian_network.variable_nodes['R'].probdist,
        PrR)

    TRN = Variable(datasetmatrix.get_column_by_label('Y', 'TRN'))
    PrTRN = CPMF(TRN, given=JointVariables(OCC, R))

    assert_CPMF_AlmostEquals_BNProbDist(
        bayesian_network.variable_nodes['TRN'].probdist,
        PrTRN)
Example #15
    def G_test_conditionally_independent(self, X: int, Y: int,
                                         Z: list[int]) -> CITestResult:
        (VarX, VarY, VarZ) = self.load_variables(X, Y, Z)

        result = CITestResult()
        result.start_timing()

        PrZ: PMF
        PrXcZ: CPMF
        PrYcZ: CPMF
        PrXYcZ: CPMF

        if len(Z) == 0:
            PrXY = PMF(JointVariables(VarX, VarY))
            PrX = PMF(VarX)
            PrY = PMF(VarY)
            PrZ = OmegaPMF()
            PrXYcZ = OmegaCPMF(PrXY)
            PrXcZ = OmegaCPMF(PrX)
            PrYcZ = OmegaCPMF(PrY)

            if self.DoF_calculator.requires_pmfs:
                self.DoF_calculator.set_context_pmfs(PrXY, PrX, PrY, None)

        else:
            PrXYZ = PMF(JointVariables(VarX, VarY, VarZ))
            PrXZ = PMF(JointVariables(VarX, VarZ))
            PrYZ = PMF(JointVariables(VarY, VarZ))
            PrZ = PMF(VarZ)

            PrXcZ = PrXZ.condition_on(PrZ)
            PrYcZ = PrYZ.condition_on(PrZ)
            PrXYcZ = PrXYZ.condition_on(PrZ)

            if self.DoF_calculator.requires_pmfs:
                self.DoF_calculator.set_context_pmfs(PrXYZ, PrXZ, PrYZ, PrZ)

        self.DoF_calculator.set_context_variables(X, Y, Z)

        if self.DoF_calculator.requires_cpmfs:
            self.DoF_calculator.set_context_cpmfs(PrXYcZ, PrXcZ, PrYcZ, PrZ)

        DoF = self.DoF_calculator.calculate_DoF(X, Y, Z)

        if not self.sufficient_samples(DoF):
            result.end_timing()
            result.index = self.ci_test_counter + 1
            result.set_insufficient_samples()
            result.set_variables(VarX, VarY, VarZ)
            result.extra_info = ' DoF {}'.format(DoF)
            return result

        G = self.G_value(PrXYcZ, PrXcZ, PrYcZ, PrZ)
        p = chi2.cdf(G, DoF)

        independent = p < self.significance

        result.end_timing()
        result.index = self.ci_test_counter + 1
        result.set_independent(independent, self.significance)
        result.set_variables(VarX, VarY, VarZ)
        result.set_statistic('G', G, dict())
        result.set_distribution('chi2', p, {'DoF': DoF})

        result.extra_info = ' DoF {}'.format(DoF)

        return result
Example #16
    def make_pmfs_from_datasetmatrix(self, X: int, Y: int, Zl: list[int]) -> tuple[CPMF, CPMF, CPMF, PMF]:
        PrZ: PMF
        PrXcZ: CPMF
        PrYcZ: CPMF
        PrXYcZ: CPMF

        (VarX, VarY, VarZ) = self.load_variables(X, Y, Zl)
        if len(Zl) == 0:
            PrXY = PMF(JointVariables(VarX, VarY))
            PrX = PMF(VarX)
            PrY = PMF(VarY)
            PrZ = OmegaPMF()
            PrXYcZ = OmegaCPMF(PrXY)
            PrXcZ = OmegaCPMF(PrX)
            PrYcZ = OmegaCPMF(PrY)

        else:
            PrXYZ = PMF(JointVariables(VarX, VarY, VarZ))
            PrXZ = PMF(JointVariables(VarX, VarZ))
            PrYZ = PMF(JointVariables(VarY, VarZ))
            PrZ = PMF(VarZ)

            PrXcZ = PrXZ.condition_on(PrZ)
            PrYcZ = PrYZ.condition_on(PrZ)
            PrXYcZ = PrXYZ.condition_on(PrZ)

        return (PrXYcZ, PrXcZ, PrYcZ, PrZ)
Example #17
def test_pmf_summing_over_variable():
    V0 = Variable([0, 1, 1, 1, 0, 1, 0, 1])
    V1 = Variable([0, 0, 1, 1, 0, 1, 1, 1])
    V2 = Variable([0, 0, 0, 0, 1, 0, 1, 1])
    V3 = Variable([0, 0, 0, 0, 0, 0, 1, 1])

    V0.ID = 1000
    V1.ID = 1111
    V2.ID = 1222
    V3.ID = 1333

    Pr = PMF(JointVariables(V0, V1, V2, V3))
    assert Pr.IDs() == (1000, 1111, 1222, 1333)

    assert Pr.p((0, 0, 0, 0)) == 1 / 8
    assert Pr.p((1, 0, 0, 0)) == 1 / 8
    assert Pr.p((1, 1, 0, 0)) == 3 / 8
    assert Pr.p((0, 0, 1, 0)) == 1 / 8
    assert Pr.p((0, 1, 1, 1)) == 1 / 8
    assert Pr.p((1, 1, 1, 1)) == 1 / 8

    Pr = Pr.sum_over(V2.ID)
    assert sum(Pr.probabilities.values()) == 1

    assert Pr.p((0, 0, 0)) == 2 / 8
    assert Pr.p((1, 0, 0)) == 1 / 8
    assert Pr.p((1, 1, 0)) == 3 / 8
    assert Pr.p((0, 1, 1)) == 1 / 8
    assert Pr.p((1, 1, 1)) == 1 / 8
    assert Pr.IDs() == (V0.ID, V1.ID, V3.ID)

    Pr = Pr.sum_over(V1.ID)
    assert sum(Pr.probabilities.values()) == 1

    assert Pr.p((0, 0)) == 2 / 8
    assert Pr.p((1, 0)) == 4 / 8
    assert Pr.p((0, 1)) == 1 / 8
    assert Pr.p((1, 1)) == 1 / 8
    assert Pr.IDs() == (V0.ID, V3.ID)

    Pr = Pr.sum_over(V0.ID)
    assert sum(Pr.probabilities.values()) == 1

    print(Pr.probabilities)

    assert Pr.p(0) == 6 / 8
    assert Pr.p(1) == 2 / 8
    assert Pr.IDs() == (V3.ID,)
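The sum_over calls above marginalize one variable out of the joint PMF: its position is dropped from every tuple key and the probabilities of colliding keys are accumulated. A minimal dict-based sketch of that step (remove_from_key below is a local helper mirroring Example #9, not the library method):

from collections import defaultdict

def remove_from_key(key, position):
    # Drop one element of a tuple key, as in Example #9.
    return key[:position] + key[position + 1:]

joint = {(0, 0, 0): 1 / 8, (1, 0, 0): 1 / 8, (1, 1, 0): 3 / 8,
         (0, 1, 1): 1 / 8, (1, 1, 1): 2 / 8}

marginal = defaultdict(float)
for key, p in joint.items():
    marginal[remove_from_key(key, 1)] += p   # sum over the middle variable

assert dict(marginal) == {(0, 0): 1 / 8, (1, 0): 4 / 8, (0, 1): 1 / 8, (1, 1): 2 / 8}
assert abs(sum(marginal.values()) - 1.0) < 1e-9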
Example #18
    def create_joint_pmf(self, values_as_indices=True) -> PMF:
        pmf = PMF(None)
        pmf.probabilities = self.joint_values_and_probabilities(
            values_as_indices=values_as_indices)
        pmf.IDs(*self.variable_IDs)
        return pmf
Example #19
def calculate_pmf_for_mi(X: Variable, Y: Variable) -> tuple[PMF, PMF, PMF]:
    PrXY = PMF(JointVariables(X, Y))
    PrX = PMF(X)
    PrY = PMF(Y)

    return (PrXY, PrX, PrY)