Example #1
import matplotlib.pyplot as plt
import numpy as np
import pymc3 as pm
import seaborn as sns


def fit_spindle_refractory():
    data = [[88, 317], [118, 99], [125, 93], [131, 97], [137, 115], [144, 143],
            [151, 194], [158, 223], [175, 245], [197, 265], [239, 287],
            [285, 297], [355, 304], [432, 307], [454, 313]]
    xscale = [0, 30]
    yscale = [0, 0.08]
    # get_target_curve (project helper, not shown here) converts the digitized
    # points into a dataframe with an 'x' grid and a normalized 'y' column.
    data_df = get_target_curve(data, xscale, yscale, scale=False)
    # Draw synthetic observations from the digitized curve; 'y' acts as the
    # probability weight of each 'x' value, so it must sum to 1.
    sample_data = np.random.choice(a=data_df['x'], p=data_df['y'], size=1000)
    with pm.Model() as model:
        # Weakly informative half-normal priors on the Wald (inverse Gaussian)
        # mean and shape parameters.
        a = pm.HalfNormal('a', 100 * 10)
        b = pm.HalfNormal('b', 100 * 10)
        pm.Wald('spindle_duration', mu=a, lam=b, observed=sample_data)
        trace = pm.sample(2000, cores=1)  # 'njobs' is deprecated; use 'cores'
    summary_df = pm.summary(trace)
    a_est = summary_df.loc['a', 'mean']
    b_est = summary_df.loc['b', 'mean']
    n_samples = 10000
    with pm.Model() as model:
        # Sample from the fitted Wald (no observed data) as a visual check of
        # the estimated parameters.
        pm.Wald('spindle_density_mean_params', mu=a_est, lam=b_est)
        outcome = pm.sample(n_samples, cores=1, chains=1)
    # pm.traceplot(trace)
    # plt.show()
    samples = outcome['spindle_density_mean_params']
    sns.distplot(samples, kde=True, bins=100)
    x = data_df['x']
    # Rescale the digitized density curve so it is comparable to the histogram.
    y = data_df['y'] * len(samples) * (x[1] - x[0])
    sns.lineplot(x=x, y=y)
    plt.show()
    print(summary_df)
    # Rescale the drawn samples back to the original data units.
    return samples * 30 + 0.5
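For reference, the density fitted above is the Wald (inverse Gaussian) distribution in PyMC3's mu/lam parameterization, so the estimated curve can also be drawn analytically instead of by rescaling the digitized points. A minimal sketch; the commented overlay assumes a_est, b_est and samples from the function above:

import numpy as np

def wald_pdf(x, mu, lam):
    # Inverse Gaussian density with mean mu and shape lam
    # (the parameterization used by pm.Wald(mu=..., lam=...)).
    return np.sqrt(lam / (2 * np.pi * x ** 3)) * np.exp(
        -lam * (x - mu) ** 2 / (2 * mu ** 2 * x))

# e.g. overlay the fitted density on the (normalized) distplot of `samples`:
# xs = np.linspace(0.01, samples.max(), 200)
# plt.plot(xs, wald_pdf(xs, a_est, b_est))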
Example #2
    def _sample_pymc3(cls, dist, size):
        """Sample from PyMC3."""

        import pymc3

        # Map the calling library's distribution class names to the
        # equivalent PyMC3 random variables (all registered under the name 'X').
        pymc3_rv_map = {
            'BetaDistribution': lambda dist:
                pymc3.Beta('X', alpha=float(dist.alpha), beta=float(dist.beta)),
            'CauchyDistribution': lambda dist:
                pymc3.Cauchy('X', alpha=float(dist.x0), beta=float(dist.gamma)),
            'ChiSquaredDistribution': lambda dist:
                pymc3.ChiSquared('X', nu=float(dist.k)),
            'ExponentialDistribution': lambda dist:
                pymc3.Exponential('X', lam=float(dist.rate)),
            'GammaDistribution': lambda dist:
                pymc3.Gamma('X', alpha=float(dist.k), beta=1/float(dist.theta)),
            'LogNormalDistribution': lambda dist:
                pymc3.Lognormal('X', mu=float(dist.mean), sigma=float(dist.std)),
            'NormalDistribution': lambda dist:
                pymc3.Normal('X', float(dist.mean), float(dist.std)),
            'GaussianInverseDistribution': lambda dist:
                pymc3.Wald('X', mu=float(dist.mean), lam=float(dist.shape)),
            'ParetoDistribution': lambda dist:
                pymc3.Pareto('X', alpha=float(dist.alpha), m=float(dist.xm)),
            'UniformDistribution': lambda dist:
                pymc3.Uniform('X', lower=float(dist.left), upper=float(dist.right))
        }

        # Bail out if the distribution has no PyMC3 equivalent.
        if dist.__class__.__name__ not in pymc3_rv_map:
            return None

        with pymc3.Model():
            # Build the matching PyMC3 variable, then draw `size` samples from
            # a single chain and return the sampled values of 'X'.
            pymc3_rv_map[dist.__class__.__name__](dist)
            return pymc3.sample(size, chains=1, progressbar=False)[:]['X']
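To see the dispatch in action outside its parent class, here is a hedged, stand-alone sketch. NormalDistribution below is a hypothetical stand-in object, since the lambdas only inspect the class name and the attributes they read ('mean' and 'std' in this case); the model block mirrors what _sample_pymc3 would execute for it.

import pymc3
from types import SimpleNamespace

class NormalDistribution(SimpleNamespace):
    # Hypothetical stand-in; only the class name and the attributes read by
    # the mapped lambda ('mean', 'std') matter for the dispatch.
    pass

dist = NormalDistribution(mean=0.0, std=1.0)

with pymc3.Model():
    # What pymc3_rv_map['NormalDistribution'](dist) builds:
    pymc3.Normal('X', float(dist.mean), float(dist.std))
    samples = pymc3.sample(500, chains=1, progressbar=False)[:]['X']

print(samples.mean(), samples.std())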
Example #3
import matplotlib.pyplot as plt
import pandas as pd
import pymc3 as pm
import seaborn as sns


def fit_refractory_minus_duration():
    # Prior samples produced by an earlier step of the pipeline.
    sample_data = pd.read_pickle(
        '../data/raw/refractory_prior_samples.pkl')['samples'].values
    with pm.Model() as model:
        # Weakly informative half-normal priors on the Wald mean and shape.
        a = pm.HalfNormal('a', 100 * 10)
        b = pm.HalfNormal('b', 100 * 10)
        pm.Wald('prior', mu=a, lam=b, observed=sample_data)
        trace = pm.sample(2000, cores=1)  # 'njobs' is deprecated; use 'cores'
    summary_df = pm.summary(trace)
    a_est = summary_df.loc['a', 'mean']
    b_est = summary_df.loc['b', 'mean']
    n_samples = 10000
    with pm.Model() as model:
        # Draw from the fitted Wald (no observed data) to compare it against
        # the input samples.
        pm.Wald('prior_check', mu=a_est, lam=b_est)
        outcome = pm.sample(n_samples, cores=1, chains=1)

    samples = outcome['prior_check']
    # Overlay draws from the fitted Wald against the original prior samples.
    sns.distplot(samples, kde=True)
    sns.distplot(sample_data, kde=True)
    plt.show()
    print(summary_df)
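Since the second model above exists only to draw from a Wald with fixed parameters, the same check can be done without MCMC by sampling the fitted inverse Gaussian directly. A minimal sketch using scipy.stats.invgauss, assuming a_est and b_est from the fit above; scipy's invgauss(m, scale=s) has mean m*s and shape s, so mean=a_est, shape=b_est maps to m=a_est/b_est, scale=b_est:

from scipy import stats

# Draw directly from the fitted inverse Gaussian instead of running pm.sample
# on an unobserved variable; a_est and b_est come from the summary above.
direct_samples = stats.invgauss.rvs(a_est / b_est, scale=b_est, size=10000)
print(direct_samples.mean(), direct_samples.std())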