Example #1
    def __init__(self,
                 data,
                 sim,
                 prior_function,
                 perturbation_kernel=None,
                 summaries_function=bs.Burstiness().compute,
                 distance_function=euc.EuclideanDistance(),
                 summaries_divisor=None,
                 use_logger=False):

        self.name = 'SMC-ABC'
        super(SMCABC, self).__init__(self.name, data, sim, use_logger)

        self.prior_function = prior_function
        self.summaries_function = summaries_function
        self.distance_function = distance_function
        self.summaries_divisor = summaries_divisor
        # Use the supplied perturbation kernel, or default to an adaptive
        # multivariate normal kernel sized to the prior dimension
        if perturbation_kernel is not None:
            self.perturbation_kernel = perturbation_kernel
        else:
            self.perturbation_kernel = MultivariateNormalKernel(
                d=self.prior_function.get_dimension(), adapt=True)

        if self.use_logger:
            self.logger = ml.SciopeLogger().get_logger()
            self.logger.info(
                "Sequential Monte-Carlo Approximate Bayesian Computation initialized"
            )
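
For orientation, a minimal usage sketch of the constructor above. The module path sciope.inference.smc_abc is an assumption (only the class name SMCABC appears in the snippet), my_simulator is a placeholder for a user-supplied simulator (compare vilar.simulate in Example #6), and the infer() call is assumed to mirror abc_instance.infer(30) in Example #6.

import numpy as np
from sciope.utilities.priors import uniform_prior
from sciope.inference import smc_abc  # assumed module name

# Placeholder simulator: a callable that maps a parameter vector to
# simulated trajectories (compare vilar.simulate in Example #6)
def my_simulator(params):
    return np.random.randn(1, 150)

data = np.loadtxt("datasets/vilar_dataset_specieA_100trajs_150time.dat", delimiter=",")
prior = uniform_prior.UniformPrior(np.asarray([0.0, 0.0]), np.asarray([1.0, 1.0]))

# Only the required arguments are passed; the keyword defaults above apply
smc_instance = smc_abc.SMCABC(data, my_simulator, prior_function=prior)
# smc_instance.infer(...)  # assumed to follow the pattern of Example #6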
Example #2
 def compute(data):
     # Collect several summary statistics into a single feature vector
     ensemble = []
     ensemble.append(bs.Burstiness(mean_trajectories=False).compute(data))
     ensemble.append(mx.GlobalMax().compute(data))
     ensemble.append(mn.GlobalMin().compute(data))
     ensemble.append(tm.TemporalMean().compute(data))
     ensemble.append(tv.TemporalVariance().compute(data))
     return np.asarray(ensemble).reshape(1, len(ensemble))
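
The aliases bs, mx, mn, tm, tv and np used in this ensemble are not imported in the snippet itself. A plausible set of imports is sketched below; the module names global_max, global_min, temporal_mean and temporal_variance are assumptions inferred from the class names used above, while burstiness is confirmed by Example #6.

import numpy as np
from sciope.utilities.summarystats import burstiness as bs
from sciope.utilities.summarystats import global_max as mx          # assumed module name
from sciope.utilities.summarystats import global_min as mn          # assumed module name
from sciope.utilities.summarystats import temporal_mean as tm       # assumed module name
from sciope.utilities.summarystats import temporal_variance as tv   # assumed module name

A combined compute function like this can then be passed wherever a summary callable is expected, mirroring summaries_function=bs.Burstiness().compute in Example #1.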
Example #3
    def __init__(self,
                 data,
                 sim,
                 prior_function,
                 epsilon=0.1,
                 summaries_function=bs.Burstiness(),
                 distance_function=euc.EuclideanDistance(),
                 summaries_divisor=None,
                 use_logger=False):
        """
        ABC class for rejection sampling
        
        Parameters
        ----------
        data : nd-array
            the observed / fixed dataset
        sim : nd-array or callable
            the simulator function, or a pre-simulated dataset
        prior_function : sciope.utilities.priors object
            the prior function generating candidate samples
        epsilon : float, optional
            tolerance bound, by default 0.1
        summaries_function : sciope.utilities.summarystats object, optional
            function calculating summary stats over simulated results; by default bs.Burstiness()
        distance_function : sciope.utilities.distancefunctions object, optional
            distance function operating over summary statistics - calculates deviation between observed and simulated
            data; by default euc.EuclideanDistance()
        summaries_divisor : 1D numpy array, optional
            normalization factors to use instead of dividing by the current known maximum of each
            statistic; these may come from prior knowledge, pilot studies, etc.
        use_logger : bool
            enable/disable logging
        """
        self.name = 'ABC'
        self.epsilon = epsilon
        self.summaries_function = summaries_function
        self.prior_function = prior_function.draw
        self.distance_function = distance_function.compute
        self.historical_distances = []
        self.summaries_divisor = summaries_divisor
        self.use_logger = use_logger
        super(ABC, self).__init__(self.name, data, sim, self.use_logger)
        self.sim = sim

        if self.use_logger:
            self.logger = ml.SciopeLogger().get_logger()
            self.logger.info("Approximate Bayesian Computation initialized")
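
To make the role of epsilon, summaries_function and distance_function concrete, here is a generic rejection-ABC loop. It is an illustrative sketch of the algorithm the docstring describes, not sciope's actual infer() implementation; prior, simulator, summaries and distance are placeholder callables.

def rejection_abc(observed, prior, simulator, summaries, distance, epsilon, num_samples):
    # Keep a candidate parameter only if its simulated summary statistics
    # fall within epsilon of the observed data's summaries
    obs_stats = summaries(observed)
    accepted = []
    while len(accepted) < num_samples:
        theta = prior()                  # draw a candidate from the prior
        sim_data = simulator(theta)      # simulate with the candidate
        sim_stats = summaries(sim_data)  # summarize the simulation
        if distance(obs_stats, sim_stats) < epsilon:
            accepted.append(theta)       # accept: close enough to the data
    return accepted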
Example #4
 def __init__(self,
              data,
              sim,
              prior_function,
              mab_variant=md.MABDirect(arm_pull),
              k=1,
              epsilon=0.1,
              parallel_mode=True,
              summaries_function=bs.Burstiness(),
              distance_function=euc.EuclideanDistance()):
     super().__init__(data, sim, prior_function, epsilon, parallel_mode,
                      summaries_function, distance_function)
     self.name = 'BanditsABC'
     self.mab_variant = mab_variant
     self.k = k
     logger.info(
         "Multi-Armed Bandits Approximate Bayesian Computation initialized")
Example #5
 def __init__(self,
              data,
              sim,
              prior_function,
              epsilon=0.1,
              parallel_mode=True,
              summaries_function=bs.Burstiness(),
              distance_function=euc.EuclideanDistance()):
     self.name = 'ABC'
     self.epsilon = epsilon
     self.summaries_function = summaries_function
     self.prior_function = prior_function
     self.distance_function = distance_function
     self.parallel_mode = parallel_mode
     self.historical_distances = []
     super(ABC, self).__init__(self.name, data, sim)
     logger.info("Approximate Bayesian Computation initialized")
Example #6
from sciope.utilities.priors import uniform_prior
from sciope.inference import abc_inference
from sciope.utilities.summarystats import burstiness as bs
import numpy as np
import vilar
from sklearn.metrics import mean_absolute_error

# Load data
data = np.loadtxt("datasets/vilar_dataset_specieA_100trajs_150time.dat",
                  delimiter=",")

# Set up the prior
dmin = [30, 200, 0, 30, 30, 1, 1, 0, 0, 0, 0.5, 0.5, 1, 30, 80]
dmax = [70, 600, 1, 70, 70, 10, 12, 1, 2, 0.5, 1.5, 1.5, 3, 70, 120]
mm_prior = uniform_prior.UniformPrior(np.asarray(dmin), np.asarray(dmax))
bs_stat = bs.Burstiness(mean_trajectories=False)

# Set up ABC
abc_instance = abc_inference.ABC(data,
                                 vilar.simulate,
                                 epsilon=0.1,
                                 prior_function=mm_prior,
                                 summaries_function=bs_stat)

# Perform ABC; require 30 samples
abc_instance.infer(30)

# Results
true_params = [[
    50.0, 100.0, 50.0, 500.0, 0.01, 50.0, 50.0, 5.0, 1.0, 10.0, 0.5, 0.2, 1.0,
    2.0, 1.0