Example 1
    def test_cubic_ximax(self):
        # Test exceeding ximax
        xi_ximax, p_vals_ximax, k_ximax, test_aborted = cubic.cubic(
            self.data_signal, alpha=1, ximax=self.ximax)

        self.assertEqual(test_aborted, True)
        self.assertEqual(xi_ximax - 1, self.ximax)
Example 2
    def test_cubic_ximax(self):
        # Test exceeding ximax
        with self.assertWarns(UserWarning):
            xi_ximax, p_vals_ximax, k_ximax, test_aborted = cubic.cubic(
                self.data_signal, alpha=1, max_iterations=self.ximax)

        self.assertEqual(test_aborted, True)
        self.assertEqual(xi_ximax - 1, self.ximax)
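
The unit tests above and below refer to fixtures such as self.data_signal, self.data_array, self.alpha, self.ximax and self.xi that are created in the test class's setUp, which is not part of these snippets. A minimal sketch of such a setUp, assuming a compound-Poisson-style construction of the population counts; the rates, seed and signal length are illustrative choices, not the values used by the actual test suite:

import unittest

import numpy as np
import quantities as pq
import neo

from elephant import cubic  # used by the test methods shown in the examples


class CubicTestCase(unittest.TestCase):

    def setUp(self):
        np.random.seed(0)
        n_bins = 100000           # number of time bins (illustrative)
        self.alpha = 0.05         # significance level used by the tests
        self.ximax = 10           # iteration cap; with alpha=1 in
                                  # test_cubic_ximax the iteration is forced
                                  # past this cap, so the test aborts
        self.xi = 10              # expected order of correlation

        # Population counts with a compound-Poisson structure: independent
        # background events plus a component that always injects `xi`
        # synchronous events, so CuBIC should recover `xi` as the maximal
        # order of correlation.
        counts = (np.random.poisson(3, n_bins)
                  + self.xi * np.random.poisson(0.1, n_bins))

        self.data_array = counts
        self.data_signal = neo.AnalogSignal(
            counts.reshape(-1, 1) * pq.dimensionless,
            sampling_period=1 * pq.ms)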
Example 3
import os

import neo
import numpy as np
import quantities as pq
import matplotlib.pyplot as plt

import elephant.statistics
from elephant import cubic


# `cubic_task.task` (used below to fetch and save files) is attached by the
# hosting platform's task decorator, which is not part of this snippet.
def cubic_task(h5_file, binsize, alpha):
    """
        Task Manifest Version: 1
        Full Name: cubic_task
        Caption: cubic
        Author: Elephant_Developers
        Description: |
            Analyses the correlation of parallel recorded spike trains
        Categories:
            - FDAT
        Compatible_queues: ['cscs_viz', 'cscs_bgq', 'epfl_viz']
        Accepts:
            h5_file:
                type: application/unknown
                description: Input HDF5 file that contains the spiking data.
            binsize:
                type: double
                description: Bin width used to compute the PSTH in ms.
            alpha:
                type: double
                description: The significance level of the test.

        Returns:
            res: image/png
    """
    h5_path = cubic_task.task.uri.get_file(h5_file)
    ion = neo.io.NeoHdf5IO(filename=h5_path)
    number_of_spike_trains = ion.get_info()['SpikeTrain']

    spiketrains = []

    for k in range(number_of_spike_trains):
        spiketrains.append(ion.get("/" + "SpikeTrain_" + str(k)))

    ion.close()

    psth_as = elephant.statistics.time_histogram(spiketrains,
                                                 binsize=binsize * pq.ms)

    result = cubic.cubic(psth_as, alpha=alpha)

    # Plot
    plt.bar(np.arange(0.75, len(result[1]) + .25, 1), result[1], width=.5)
    plt.axhline(alpha, ls='--', color='r')
    plt.xlabel('$\\xi$')
    plt.ylabel('P value')
    plt.title('$\\hat\\xi$=' + str(result[0]))

    output_filename = os.path.splitext(h5_path)[0] + '.png'
    with open(output_filename, 'wb') as result_pth:
        plt.savefig(result_pth, dpi=100)
    dst_name = os.path.basename(output_filename)
    return cubic_task.task.uri.save_file(mime_type='image/png',
                                         src_path=output_filename,
                                         dst_path=dst_name)
Example 4
import os

import neo
import numpy as np
import quantities as pq
import matplotlib.pyplot as plt

from elephant.statistics import time_histogram
from elephant import cubic


# `cubic_task.task` (used below to fetch and save files) is attached by the
# hosting platform's task decorator, which is not part of this snippet.
def cubic_task(h5_file, binsize, alpha):
    """
        Task Manifest Version: 1
        Full Name: cubic_task
        Caption: cubic
        Author: Elephant Developers
        Description: |
            Analyses the correlation of parallel recorded spike trains
        Categories:
            - FDAT
        Compatible_queues: ['cscs_viz', 'cscs_bgq', 'epfl_viz']
        Accepts:
            h5_file:
                type: application/unknown
                description: Input HDF5 file that contains the spiking data.
            binsize:
                type: double
                description: Bin width used to compute the PSTH in ms.
            alpha:
                type: double
                description: The significance level of the test.

        Returns:
            res: image/png
    """
    h5_path = cubic_task.task.uri.get_file(h5_file)
    ion = neo.io.NeoHdf5IO(filename=h5_path)
    number_of_spike_trains = ion.get_info()['SpikeTrain']

    spiketrains = []

    for k in range(number_of_spike_trains):
        spiketrains.append(ion.get("/" + "SpikeTrain_" + str(k)))

    ion.close()

    psth_as = time_histogram(spiketrains, binsize=binsize * pq.ms)

    result = cubic.cubic(psth_as, alpha=alpha)

    # Plot
    plt.bar(np.arange(0.75, len(result[1]) + .25, 1), result[1], width=.5)
    plt.axhline(alpha, ls='--', color='r')
    plt.xlabel('$\\xi$')
    plt.ylabel('P value')
    plt.title('$\\hat\\xi$=' + str(result[0]))

    output_filename = os.path.splitext(h5_path)[0] + '.png'
    with open(output_filename, 'wb') as result_pth:
        plt.savefig(result_pth, dpi=100)
    dst_name = os.path.basename(output_filename)
    return cubic_task.task.uri.save_file(mime_type='image/png',
                                         src_path=output_filename,
                                         dst_path=dst_name)
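
Outside the task framework, the same analysis can be reproduced locally on surrogate data. A minimal sketch, assuming an Elephant version contemporary with these snippets (one that still provides homogeneous_poisson_process and accepts the binsize keyword used above); the rate, duration and bin width are illustrative:

import quantities as pq

from elephant.spike_train_generation import homogeneous_poisson_process
from elephant.statistics import time_histogram
from elephant import cubic

# Surrogate data: independent Poisson spike trains instead of an HDF5 file.
spiketrains = [homogeneous_poisson_process(10 * pq.Hz,
                                           t_start=0 * pq.s,
                                           t_stop=10 * pq.s)
               for _ in range(20)]

# Population histogram (spike counts summed over neurons) and the CuBIC test.
psth = time_histogram(spiketrains, 5 * pq.ms)
xi, p_vals, kappa, aborted = cubic.cubic(psth, alpha=0.05)

print('estimated order of correlation:', xi, 'aborted:', aborted)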
Example 5
    def test_cubic(self):

        # Computing the output of CuBIC for the test data AnalogSignal
        xi, p_vals, k, test_aborted = cubic.cubic(self.data_signal,
                                                  alpha=self.alpha)

        # Check the types of the outputs
        self.assertIsInstance(xi, int)
        self.assertIsInstance(p_vals, list)
        self.assertIsInstance(k, list)

        # Check that the number of tests equals the estimated order of
        # correlation
        self.assertEqual(xi, len(p_vals))

        # Check that none of the first xi-1 p-values exceeds the
        # significance level alpha
        for p in p_vals[:-1]:
            self.assertGreater(self.alpha, p)

        # Check that the last p-value exceeds the significance level
        self.assertGreater(p_vals[-1], self.alpha)

        # Check that three cumulants are returned
        self.assertEqual(3, len(k))

        # Check the analytical constraint on the cumulants, K_1 < K_2
        self.assertGreater(k[1], k[0])

        # Check that the computed order of correlation matches the one
        # expected for the test data
        self.assertEqual(xi, self.xi)

        # Computing the output of CuBIC for the test data Array
        xi, p_vals, k, test_aborted = cubic.cubic(self.data_array,
                                                  alpha=self.alpha)

        # Check the types of the outputs
        self.assertIsInstance(xi, int)
        self.assertIsInstance(p_vals, list)
        self.assertIsInstance(k, list)

        # Check that the number of tests equals the estimated order of
        # correlation
        self.assertEqual(xi, len(p_vals))

        # Check that none of the first xi-1 p-values exceeds the
        # significance level alpha
        for p in p_vals[:-1]:
            self.assertGreater(self.alpha, p)

        # Check that the last p-value exceeds the significance level
        self.assertGreater(p_vals[-1], self.alpha)

        # Check that three cumulants are returned
        self.assertEqual(3, len(k))

        # Check the analytical constraint on the cumulants, K_1 < K_2
        self.assertGreater(k[1], k[0])

        # Check that the computed order of correlation matches the one
        # expected for the test data
        self.assertEqual(xi, self.xi)

        # Check the output for test_aborted
        self.assertEqual(test_aborted, False)
Example 6
    def test_cubic(self):

        # Computing the output of CuBIC for the test data AnalogSignal
        xi, p_vals, k, test_aborted = cubic.cubic(
            self.data_signal, alpha=self.alpha)

        # Check the types of the outputs
        self.assertIsInstance(xi, int)
        self.assertIsInstance(p_vals, list)
        self.assertIsInstance(k, list)

        # Check that the number of tests equals the estimated order of
        # correlation
        self.assertEqual(xi, len(p_vals))

        # Check that none of the first xi-1 p-values exceeds the
        # significance level alpha
        for p in p_vals[:-1]:
            self.assertGreater(self.alpha, p)

        # Check that the last p-value exceeds the significance level
        self.assertGreater(p_vals[-1], self.alpha)

        # Check that three cumulants are returned
        self.assertEqual(3, len(k))

        # Check the analytical constraint on the cumulants, K_1 < K_2
        self.assertGreater(k[1], k[0])

        # Check that the computed order of correlation matches the one
        # expected for the test data
        self.assertEqual(xi, self.xi)

        # Computing the output of CuBIC for the test data Array
        xi, p_vals, k, test_aborted = cubic.cubic(
            self.data_array, alpha=self.alpha)

        # Check the types of the outputs
        self.assertIsInstance(xi, int)
        self.assertIsInstance(p_vals, list)
        self.assertIsInstance(k, list)

        # Check that the number of tests equals the estimated order of
        # correlation
        self.assertEqual(xi, len(p_vals))

        # Check that none of the first xi-1 p-values exceeds the
        # significance level alpha
        for p in p_vals[:-1]:
            self.assertGreater(self.alpha, p)

        # Check that the last p-value exceeds the significance level
        self.assertGreater(p_vals[-1], self.alpha)

        # Check that three cumulants are returned
        self.assertEqual(3, len(k))

        # Check the analytical constraint on the cumulants, K_1 < K_2
        self.assertGreater(k[1], k[0])

        # Check that the computed order of correlation matches the one
        # expected for the test data
        self.assertEqual(xi, self.xi)

        # Check the output for test_aborted
        self.assertEqual(test_aborted, False)
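
For reference, the third value returned by cubic.cubic is a list with the first three cumulants of the population count, which is what the K_1 < K_2 assertion above exercises. A quick sketch using plain moment estimators; Elephant itself may use unbiased k-statistics, so its numbers can differ slightly:

import numpy as np


def sample_cumulants(counts):
    """First three sample cumulants of a 1-D array of population counts."""
    counts = np.asarray(counts, dtype=float)
    k1 = counts.mean()                    # mean
    k2 = counts.var()                     # variance
    k3 = np.mean((counts - k1) ** 3)      # third central moment
    return [k1, k2, k3]


# For overdispersed (correlated) count data the variance exceeds the mean,
# which is the K_1 < K_2 relation asserted in the tests above.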