Example #1
    def test_entropy_hetero(self):
        n_out_ones = 3
        n_out_bits = 16
        errs = [{"fp": 1, "fn": 0}, {"fp": 0, "fn": 0}, {"fp": 2, "fn": 0}]
        errs2 = [1, 0, 2]
        v1 = entropy_hetero(errs, n_out_bits, n_out_ones)
        v2 = entropy_hetero(errs2, n_out_bits, n_out_ones)

        self.assertAlmostEqual(v1, 22.06592095594754)
        self.assertAlmostEqual(v2, 22.06592095594754)
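
For orientation, here is a minimal standalone sketch of the call this test exercises. The import path (PyNAM's entropy module) and the interchangeability of the two error representations are assumptions read off this test, not taken from library documentation.

# Minimal sketch, assuming entropy_hetero can be imported from pynam.entropy
# (the test file's own imports are not shown above).
from pynam.entropy import entropy_hetero

n_out_bits = 16  # width of each output vector
n_out_ones = 3   # number of ones per stored output vector

# Per-sample errors as false-positive/false-negative dicts ...
errs_dicts = [{"fp": 1, "fn": 0}, {"fp": 0, "fn": 0}, {"fp": 2, "fn": 0}]
# ... or as plain false-positive counts; the test treats both as equivalent.
errs_counts = [1, 0, 2]

print(entropy_hetero(errs_dicts, n_out_bits, n_out_ones))   # ~22.07 bits
print(entropy_hetero(errs_counts, n_out_bits, n_out_ones))  # ~22.07 bits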
Example #2
File: scm.py  Project: hbp-unibi/pyscm
def scm_analysis(netw, terminate_times, delay=0.1, flag=False):
    """
    Analysis of the SCM, compared to plain PyNAM (first spikes after the input spike).
    :param netw: Should be of NetworkAnalysis type
    :param flag: Simple model flag
    :return: Information, normalized information and false positive/negative
             counts, each for both the first-spike (BiNAM) run and the SCM
    """

    # Calculate the SCM information
    mat_out_res = calc_scm_output_matrix(netw, terminate_times, delay, max)
    N, n = mat_out_res.shape
    errs = entropy.calculate_errs(mat_out_res, netw["mat_out"])
    I = entropy.entropy_hetero(errs, n, netw["data_params"]["n_ones_out"])

    # Get the spike times of the source population
    tem, _, _ = pynam.network.NetworkInstance.flatten(netw["input_times"],
                                                      netw["input_indices"])
    start_times = np.unique(tem)
    for i in xrange(len(start_times)):
        start_times = start_times + 0.98 + delay * 2.5
    # calc_scm_output_matrix needs such an array
    start_times_ar = np.zeros((2, len(start_times)))
    start_times_ar[0] = start_times

    # Finally calculate the BiNAM information from the start
    mat_out_first = calc_scm_output_matrix(netw, start_times_ar, delay)
    errs_start = entropy.calculate_errs(mat_out_first, netw["mat_out"])
    I_start = entropy.entropy_hetero(errs_start, n,
                                     netw["data_params"]["n_ones_out"])

    # Calculate the non-spiking information for reference
    I_ref, mat_ref, errs_ref = netw.calculate_max_storage_capacity()
    # Normalized information values
    I_norm = 0.0 if I_ref == 0.0 else I / float(I_ref)
    I_norm_start = 0.0 if I_ref == 0.0 else I_start / float(I_ref)

    # The number of False Positives and Negatives for both SCM and BiNAM
    fp = sum(map(lambda x: x["fp"], errs))
    fn = sum(map(lambda x: x["fn"], errs))
    fp_start = sum(map(lambda x: x["fp"], errs_start))
    fn_start = sum(map(lambda x: x["fn"], errs_start))

    if flag:
        print "\t\t\t\tBiNAM \t\tSimple_Net"
    else:
        print "\t\t\t\tBiNAM \t\tSCM"
    print "Information:\t\t\t", format(I_start, '.2f'), "\t", format(I, '.2f')
    print "Normalized information:\t\t", format(I_norm_start,
                                                '.2f'), "\t\t", format(
                                                    I_norm, '.2f')
    print "False positives:\t\t", format(fp_start,
                                         '.0f'), "\t\t", format(fp, '.0f')
    print "False negatives:\t\t", format(fn_start,
                                         '.0f'), "\t\t", format(fn, '.0f')
    return I, I_norm, fp, fn, I_start, I_norm_start, fp_start, fn_start
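
The tuple returned by scm_analysis is positional, so a small hypothetical wrapper (not part of pyscm) can make call sites easier to read; the key names below simply mirror the local variable names in the return statement above, and `netw` is assumed to be a PyNAM NetworkAnalysis-style object built elsewhere.

def report_scm_capacity(netw, terminate_times, delay=0.1, flag=False):
    # Hypothetical convenience wrapper: forwards to scm_analysis (defined
    # above) and returns its eight positional results keyed by name.
    keys = ("I", "I_norm", "fp", "fn",
            "I_start", "I_norm_start", "fp_start", "fn_start")
    return dict(zip(keys, scm_analysis(netw, terminate_times, delay, flag)))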
Example #3
File: scm.py  Project: hbp-sanncs/pyscm
def scm_analysis(netw, terminate_times, delay=0.1, flag=False):
    """
    Analysis of the SCM, compared to plain PyNAM (first spikes after the input spike).
    :param netw: Should be of NetworkAnalysis type
    :param flag: Simple model flag
    :return: Information, normalized information and false positive/negative
             counts, each for both the first-spike (BiNAM) run and the SCM
    """

    # Calculate the SCM information
    mat_out_res = calc_scm_output_matrix(netw, terminate_times, delay, max)
    N, n = mat_out_res.shape
    errs = entropy.calculate_errs(mat_out_res, netw["mat_out"])
    I = entropy.entropy_hetero(errs, n, netw["data_params"]["n_ones_out"])

    # Get the spike times of the source population
    tem, _, _ = pynam.network.NetworkInstance.flatten(netw["input_times"],
                                                      netw["input_indices"])
    start_times = np.unique(tem)
    for i in xrange(len(start_times)):
        start_times = start_times + 0.98 + delay * 2.5
    # calc_scm_output_matrix needs such an array
    start_times_ar = np.zeros((2, len(start_times)))
    start_times_ar[0] = start_times

    # Finally calculate the BiNAM information from the start
    mat_out_first = calc_scm_output_matrix(netw, start_times_ar, delay)
    errs_start = entropy.calculate_errs(mat_out_first, netw["mat_out"])
    I_start = entropy.entropy_hetero(errs_start, n,
                                     netw["data_params"]["n_ones_out"])

    # Calculate the non-spiking information for reference
    I_ref, mat_ref, errs_ref = netw.calculate_max_storage_capacity()
    # Normalized information values
    I_norm = 0.0 if I_ref == 0.0 else I / float(I_ref)
    I_norm_start = 0.0 if I_ref == 0.0 else I_start / float(I_ref)

    # The number of False Positives and Negatives for both SCM and BiNAM
    fp = sum(map(lambda x: x["fp"], errs))
    fn = sum(map(lambda x: x["fn"], errs))
    fp_start = sum(map(lambda x: x["fp"], errs_start))
    fn_start = sum(map(lambda x: x["fn"], errs_start))

    if flag:
        print "\t\t\t\tBiNAM \t\tSimple_Net"
    else:
        print "\t\t\t\tBiNAM \t\tSCM"
    print "Information:\t\t\t", format(I_start, '.2f'), "\t", format(I, '.2f')
    print "Normalized information:\t\t", format(I_norm_start,
                                                '.2f'), "\t\t", format(I_norm,
                                                                       '.2f')
    print "False positives:\t\t", format(fp_start, '.0f'), "\t\t", format(fp,
                                                                          '.0f')
    print "False negatives:\t\t", format(fn_start, '.0f'), "\t\t", format(fn,
                                                                          '.0f')
    return I, I_norm, fp, fn, I_start, I_norm_start, fp_start, fn_start
Example #4
    def test_entropy_hetero_uniform(self):
        n_samples = 10
        n_out_ones = 3
        n_out_bits = 16
        err = 1.5
        errs = [err for _ in xrange(n_samples)]
        errs2 = [{"fp": err, "fn": 0} for _ in xrange(n_samples)]

        v1 = entropy_hetero_uniform(err, n_samples, n_out_bits, n_out_ones)
        v2 = entropy_hetero(errs, n_out_bits, n_out_ones)
        v3 = entropy_hetero(errs2, n_out_bits, n_out_ones)
        self.assertAlmostEqual(v1, v2)
        self.assertAlmostEqual(v1, v3)
Example #5
    def test_entropy_hetero(self):
        n_out_ones = 3
        n_out_bits = 16
        errs = [
            {
                "fp": 1,
                "fn": 0
            },
            {
                "fp": 0,
                "fn": 0
            },
            {
                "fp": 2,
                "fn": 0
            }
        ]
        errs2 = [1, 0, 2]
        v1 = entropy_hetero(errs, n_out_bits, n_out_ones)
        v2 = entropy_hetero(errs2, n_out_bits, n_out_ones)

        self.assertAlmostEqual(v1, 22.06592095594754)
        self.assertAlmostEqual(v2, 22.06592095594754)
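
The examples above use two interchangeable error representations. Below is a small hypothetical helper (not part of PyNAM or pyscm) that aggregates totals from either form, mirroring the sum(map(...)) pattern in scm_analysis and the equivalence shown in Example #1.

def total_errors(errs):
    """Sum false positives and false negatives over per-sample errors.

    Assumes the {"fp": ..., "fn": ...} dicts produced by calculate_errs;
    plain numeric entries are counted as false positives with zero false
    negatives, matching how the tests above use them.
    """
    fp = sum(e["fp"] if isinstance(e, dict) else e for e in errs)
    fn = sum(e["fn"] if isinstance(e, dict) else 0 for e in errs)
    return fp, fn

print(total_errors([{"fp": 1, "fn": 0}, {"fp": 0, "fn": 0}, {"fp": 2, "fn": 0}]))  # (3, 0)
print(total_errors([1, 0, 2]))                                                     # (3, 0)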