Example #1
0
    def test_plot_stochastic_parameter(self):
        """Plotting a stochastic K parameter must create the expected figure file."""
        # Target mean/std for the lognormal K parameter (std chosen so that
        # +/- 6 sigma stays inside the [0.0, 3.0] bounds).
        mean_val = 10 * 2.5 / 87
        std_val = numpy.min([mean_val - 0.0, 3.0 - mean_val]) / 6.0
        param = set_parameter("K", optimize_pdf=True, use="manual",
                              K_lo=0.0, K_hi=3.0, K_pdf="lognormal",
                              K_pdf_params={"skew": 0.0, "mean": mean_val / std_val},
                              K_mean=mean_val, K_std=std_val)
        fig_name = "K_parameter"
        fig_path = os.path.join(self.config.out.FOLDER_FIGURES, fig_name + ".png")
        # The file must not pre-exist, so its presence afterwards proves
        # that the plot call actually wrote it.
        assert not os.path.exists(fig_path)
        self.plotter.plot_stochastic_parameter(param, figure_name=fig_name)
        assert os.path.exists(fig_path)
Example #2
0
 def _generate_parameters(self, **defaults):
     """Build one parameter per expected name and store all in self.parameters."""
     names = ["x1eq_star", "K", "tau1", "tau0", "MCsplit", "MC", "eps"]
     self.parameters.update({name: set_parameter(name, **defaults) for name in names})
Example #3
0
# coding=utf-8

import numpy as np

from tvb_epilepsy.base.utils.log_error_utils import initialize_logger
from tvb_epilepsy.plot.plotter import Plotter
from tvb_epilepsy.service.stochastic_parameter_builder import set_parameter

logger = initialize_logger(__name__)

if __name__ == "__main__":
    # Demo script: build a lognormal stochastic parameter "x0" bounded to
    # [0.0, 2.0] with target mean 0.5 and std 0.05, then plot it.
    plotter = Plotter()
    x0 = set_parameter("x0", optimize_pdf=True, use="manual", x0_lo=0.0, x0_hi=2.0, x0_pdf="lognormal",
                       x0_pdf_params={"skew": 0.0, "mean": 0.5 / 0.05}, x0_mean=0.5, x0_std=0.05)

    # Plot over a dense grid that starts slightly below the lower bound (0.0).
    axes, fig = plotter.plot_stochastic_parameter(x0, np.arange(-0.01, 2.0, 0.01))

    # NOTE(review): the commented-out experiment below is incomplete (the
    # inner loop only increments `icon` and never fills `c`); kept verbatim.
    # Testing for converting from symmetric matrix to two flattened columns and backwards:
    # a = np.array([[11, 12, 13, 14],
    #               [21, 22, 23, 24],
    #               [31, 32, 33, 34],
    #               [41, 42, 43, 44]])
    # b = np.stack([a[np.triu_indices(4, 1)], a.T[np.triu_indices(4, 1)]]).T
    # c = np.ones((4,4))
    # icon = -1
    # for ii in range(4):
    #     for jj in range(ii, 4):
    #         if (ii == jj):
    #             c[ii, jj] = 0
    #         else:
    #             icon += 1
Example #4
0
def main_sampling_service(config=Config()):
    """Demonstrate and smoke-test the available sampling services.

    Runs the deterministic, numpy-uniform, scipy truncated-normal and SALib
    samplers, logs each sampler's sample statistics, writes every sampler to
    an H5 file, and finally checks that stochastic parameters built from
    target statistics reproduce those statistics when sampled.

    :param config: Config instance providing output folder paths.
    """
    logger = initialize_logger(__name__, config.out.FOLDER_LOGS)

    def _log_stats(stats_dict):
        # One log line per summary statistic.
        for key, value in stats_dict.items():
            logger.info("\n" + key + ": " + str(value))

    n_samples = 100
    logger.info("\nDeterministic numpy.linspace sampling:")
    sampler = DeterministicSamplingService(n_samples=n_samples, grid_mode=True)
    samples, stats = sampler.generate_samples(low=1.0,
                                              high=2.0,
                                              shape=(2, ),
                                              stats=True)
    _log_stats(stats)
    logger.info(repr(sampler))
    writer = H5Writer()
    writer.write_generic(sampler, config.out.FOLDER_RES,
                         "test_Stochastic_Sampler.h5")

    logger.info("\nStochastic uniform sampling with numpy:")
    sampler = StochasticSamplingService(n_samples=n_samples,
                                        sampling_module="numpy")
    # parameter carries (a, b) = (low, high) for the uniform distribution
    samples, stats = sampler.generate_samples(
        parameter=(1.0, 2.0),
        probability_distribution=ProbabilityDistributionTypes.UNIFORM,
        shape=(2, ),
        stats=True)
    _log_stats(stats)
    logger.info(repr(sampler))
    writer.write_generic(sampler, config.out.FOLDER_RES,
                         "test1_Stochastic_Sampler.h5")

    logger.info("\nStochastic truncated normal sampling with scipy:")
    sampler = StochasticSamplingService(n_samples=n_samples)
    # parameter carries (loc, scale) = (mean, sigma); low/high truncate support
    samples, stats = sampler.generate_samples(parameter=(1.5, 1.0),
                                              probability_distribution="norm",
                                              low=1,
                                              high=2,
                                              shape=(2, ),
                                              stats=True)
    _log_stats(stats)
    logger.info(repr(sampler))
    writer.write_generic(sampler, config.out.FOLDER_RES,
                         "test2_Stochastic_Sampler.h5")

    logger.info("\nSensitivity analysis sampling:")
    sampler = SalibSamplingService(n_samples=n_samples, sampler="latin")
    samples, stats = sampler.generate_samples(low=1,
                                              high=2,
                                              shape=(2, ),
                                              stats=True)
    _log_stats(stats)
    logger.info(repr(sampler))
    writer.write_generic(sampler, config.out.FOLDER_RES,
                         "test3_Stochastic_Sampler.h5")

    logger.info("\nTesting distribution class and conversions...")
    sampler = StochasticSamplingService(n_samples=n_samples)
    for distrib_name in ProbabilityDistributionTypes.available_distributions:
        logger.info("\n" + distrib_name)
        logger.info("\nmode/mean, std to distribution " + distrib_name + ":")
        # Choose target statistics the distribution can actually match:
        # one-parameter distributions get only a mean; the rest get mean & std.
        if np.in1d(distrib_name, [
                ProbabilityDistributionTypes.EXPONENTIAL,
                ProbabilityDistributionTypes.CHISQUARE
        ]):
            target_stats = {"mean": 1.0}
        elif np.in1d(distrib_name, [
                ProbabilityDistributionTypes.BERNOULLI,
                ProbabilityDistributionTypes.POISSON
        ]):
            target_stats = {"mean": np.ones((2, ))}
        else:
            # BINOMIAL, UNIFORM and all remaining distributions share the same
            # targets (the original branched on them but set identical values).
            target_stats = {"mean": 1.0, "std": 2.0}
        # Statistic used for the target-vs-sample comparison (same everywhere).
        stats_m = "mean"
        parameter1 = generate_stochastic_parameter(
            name="test1_" + distrib_name,
            low=0.0,
            high=2.0,
            p_shape=(2, 2),
            probability_distribution=distrib_name,
            optimize_pdf=True,
            use="manual",
            **target_stats)
        name2 = "test2_" + distrib_name
        defaults = set_parameter_defaults(name2,
                                          _pdf=distrib_name,
                                          _shape=(2, 2),
                                          _lo=0.0,
                                          _hi=2.0,
                                          **(deepcopy(target_stats)))
        parameter2 = set_parameter(name=name2, use="manual", **defaults)
        for parameter in (parameter1, parameter2):
            logger.info(str(parameter))
            # BUG FIX: the original assigned the (samples, stats) tuple to
            # `samples` only, so the loop below logged and compared the stale
            # stats of the previous (SALib) sampler. Unpack both here.
            samples, stats = sampler.generate_samples(parameter=parameter,
                                                      stats=True)
            _log_stats(stats)
            diff = target_stats[stats_m] - stats[stats_m]
            # asarray makes the check safe even when diff is a plain scalar
            if np.any(np.abs(np.asarray(diff).flatten()) > 0.001):
                logger.warning(
                    "Large difference between target and resulting samples' " +
                    stats_m + "!: " + str(diff))
            del parameter
Example #5
0
 def _add_parameters(self, **defaults):
     """Build one parameter per expected name and store all in self.parameters."""
     self.parameters.update(
         {name: set_parameter(name, **defaults) for name in ["dX1t", "dZt", "sig"]})
Example #6
0
 def __add_parameters(self, **defaults):
     """Build one parameter per expected name and store all in self.parameters."""
     names = ["x1init", "zinit", "sig_init", "scale_signal", "offset_signal"]
     self.parameters.update({name: set_parameter(name, **defaults) for name in names})