from scipy.stats import cauchy, logistic, norm


def proposal(x):
    """
    Symmetric proposal distribution used to generate the sample path to be
    plotted: draws a candidate centred at the current state x.
    """
    # Alternative symmetric proposals:
    # return cauchy.rvs(loc=x, scale=0.15, size=1)
    # return norm.rvs(loc=x, scale=0.2, size=1)
    return logistic.rvs(loc=x, scale=1, size=1)
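A symmetric proposal like this is normally used inside a random-walk Metropolis sampler, where the symmetry lets the acceptance ratio reduce to a ratio of target densities. The sketch below shows one way the sample path could be produced; `target_logpdf` and `sample_path` are hypothetical stand-ins, not part of the original snippet.

import numpy as np
from scipy.stats import norm


def target_logpdf(x):
    # Hypothetical target density (standard normal), for illustration only.
    return norm.logpdf(x)


def sample_path(n_steps, x0=0.0):
    # Random-walk Metropolis: propose with the symmetric proposal above and
    # accept with probability min(1, target(x_new) / target(x)).
    path = [x0]
    x = x0
    for _ in range(n_steps):
        x_new = proposal(x)[0]
        log_alpha = target_logpdf(x_new) - target_logpdf(x)
        if np.log(np.random.rand()) < log_alpha:
            x = x_new
        path.append(x)
    return np.array(path)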
    def generateSamples(self):
        # Append self.sampleGain new samples to the buffer according to the
        # configured source type, then apply the transformation to the data.
        if self.sourceType == 'white':
            self.data = np.append(self.data, np.random.normal(size=self.sampleGain))
        elif self.sourceType == 'const':
            self.data = np.append(self.data, np.zeros(self.sampleGain))
        elif self.sourceType == 'logistic':
            self.data = np.append(self.data, logistic.rvs(size=self.sampleGain))
        self.transformedData = self.transformation(self.data)
Example #3
import matplotlib.pyplot as plt
from scipy.stats import logistic


def logistic_distribution(par_size, par_scale):
    # Draw par_size samples from a logistic distribution with the given scale,
    # plot them against their index, and return the (index, value) pairs.
    logis = logistic.rvs(size=par_size, scale=par_scale)
    x_logistic = []
    logistic_data = []
    for i in range(len(logis)):
        x_logistic.append(i)
        logistic_data.append([i, logis[i]])
    print(logistic_data)

    plt.plot(x_logistic, logis)
    plt.show()
    return logistic_data
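A minimal call, with illustrative argument values, could look like this:

# 100 draws with scale 2.0; returns the list of [index, value] pairs.
pairs = logistic_distribution(100, 2.0)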
Example #4
    def gen_sample(self, n):
        # Draw n samples from a logistic distribution with this object's
        # location (mu) and scale (sigma).
        return logistic.rvs(loc=self.mu, scale=self.sigma, size=n)
    # Minimise the training loss with BFGS, supplying the analytic gradient.
    res = minimize(training_loss,
                   jac=training_loss_jac,
                   x0=fit_params,
                   method="BFGS",
                   options={
                       "maxiter": maxiter,
                       "gtol": tol
                   })
    print(res)
    final_params = res.x
    for i in range(num):
        # Scales were optimised on the log scale; map them back to positive values.
        final_params[2 * i + 1] = np.exp(final_params[2 * i + 1])
    results = []
    for i in range(num):
        results.append(final_params[2 * i])
        # Report the offset from the location to the 75th percentile as a
        # spread summary for each component.
        results.append(
            logistic.isf(
                0.25, loc=final_params[2 * i], scale=final_params[2 * i + 1]) -
            final_params[2 * i])

    # The remaining num - 1 parameters are passed through unchanged.
    for i in range(num - 1):
        results.append(final_params[2 * num + i])

    return results


data = logistic.rvs(loc=10, scale=5, size=200)
print(estimate(data, 2))
print("True likelihood:", log_likelihood_logistic(data, [10, np.log(5)]))
Example #6
import numpy as np
from scipy.stats import (norm, t, laplace, logistic, f as F, gamma, lognorm,
                         invgauss, pareto, beta, poisson)


def bootstrap(a,
              f=None,
              b=100,
              method="balanced",
              family=None,
              strata=None,
              smooth=False,
              random_state=None):
    """
    Calculate function values from bootstrap samples or
    optionally return bootstrap samples themselves

    Parameters
    ----------
    a : array-like
        Original sample
    f : callable or None
        Function to be bootstrapped
    b : int
        Number of bootstrap samples
    method : string
        * 'ordinary'
        * 'balanced'
        * 'parametric'
    family : string or None
        * 'gaussian'
        * 't'
        * 'laplace'
        * 'logistic'
        * 'F'
        * 'gamma'
        * 'log-normal'
        * 'inverse-gaussian'
        * 'pareto'
        * 'beta'
        * 'poisson'
    strata : array-like or None
        Stratification labels, ignored when method
        is parametric
    smooth : boolean
        Whether or not to add noise to bootstrap
        samples, ignored when method is parametric
    random_state : int or None
        Random number seed

    Returns
    -------
    y | X : np.array
        Function applied to each bootstrap sample
        or bootstrap samples if f is None
    """
    np.random.seed(random_state)
    a = np.asarray(a)
    n = len(a)

    # stratification not meaningful for parametric sampling
    if strata is not None and (method != "parametric"):
        strata = np.asarray(strata)
        if len(strata) != len(a):
            raise ValueError("a and strata must have" " the same length")
        # recursively call bootstrap without stratification
        # on the different strata
        masks = [strata == x for x in np.unique(strata)]
        boot_strata = [
            bootstrap(a=a[m],
                      f=None,
                      b=b,
                      method=method,
                      strata=None,
                      random_state=random_state) for m in masks
        ]
        # concatenate resampled strata along first column axis
        X = np.concatenate(boot_strata, axis=1)
    else:
        if method == "ordinary":
            # i.i.d. sampling from ecdf of a
            X = np.reshape(a[np.random.choice(range(a.shape[0]),
                                              a.shape[0] * b)],
                           newshape=(b, ) + a.shape)
        elif method == "balanced":
            # permute b concatenated copies of a
            r = np.reshape([a] * b, newshape=(b * a.shape[0], ) + a.shape[1:])
            X = np.reshape(r[np.random.permutation(range(r.shape[0]))],
                           newshape=(b, ) + a.shape)
        elif method == "parametric":
            if len(a.shape) > 1:
                raise ValueError("a must be one-dimensional")

            # fit parameters by maximum likelihood and sample
            if family == "gaussian":
                theta = norm.fit(a)
                arr = norm.rvs(size=n * b,
                               loc=theta[0],
                               scale=theta[1],
                               random_state=random_state)
            elif family == "t":
                theta = t.fit(a, fscale=1)
                arr = t.rvs(size=n * b,
                            df=theta[0],
                            loc=theta[1],
                            scale=theta[2],
                            random_state=random_state)
            elif family == "laplace":
                theta = laplace.fit(a)
                arr = laplace.rvs(size=n * b,
                                  loc=theta[0],
                                  scale=theta[1],
                                  random_state=random_state)
            elif family == "logistic":
                theta = logistic.fit(a)
                arr = logistic.rvs(size=n * b,
                                   loc=theta[0],
                                   scale=theta[1],
                                   random_state=random_state)
            elif family == "F":
                theta = F.fit(a, floc=0, fscale=1)
                arr = F.rvs(size=n * b,
                            dfn=theta[0],
                            dfd=theta[1],
                            loc=theta[2],
                            scale=theta[3],
                            random_state=random_state)
            elif family == "gamma":
                theta = gamma.fit(a, floc=0)
                arr = gamma.rvs(size=n * b,
                                a=theta[0],
                                loc=theta[1],
                                scale=theta[2],
                                random_state=random_state)
            elif family == "log-normal":
                theta = lognorm.fit(a, floc=0)
                arr = lognorm.rvs(size=n * b,
                                  s=theta[0],
                                  loc=theta[1],
                                  scale=theta[2],
                                  random_state=random_state)
            elif family == "inverse-gaussian":
                theta = invgauss.fit(a, floc=0)
                arr = invgauss.rvs(size=n * b,
                                   mu=theta[0],
                                   loc=theta[1],
                                   scale=theta[2],
                                   random_state=random_state)
            elif family == "pareto":
                theta = pareto.fit(a, floc=0)
                arr = pareto.rvs(size=n * b,
                                 b=theta[0],
                                 loc=theta[1],
                                 scale=theta[2],
                                 random_state=random_state)
            elif family == "beta":
                theta = beta.fit(a)
                arr = beta.rvs(size=n * b,
                               a=theta[0],
                               b=theta[1],
                               loc=theta[2],
                               scale=theta[3],
                               random_state=random_state)
            elif family == "poisson":
                theta = np.mean(a)
                arr = poisson.rvs(size=n * b,
                                  mu=theta,
                                  random_state=random_state)
            else:
                raise ValueError("Invalid family")

            X = np.reshape(arr, newshape=(b, n))
        else:
            raise ValueError("method must be either 'ordinary'"
                             " , 'balanced', or 'parametric',"
                             " '{method}' was supplied".format(method=method))

    # samples are already smooth in the parametric case
    if smooth and (method != "parametric"):
        X += np.random.normal(size=X.shape, scale=1 / np.sqrt(n))

    if f is None:
        return X
    else:
        return np.asarray([f(x) for x in X])
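
A short usage sketch for the function above, relying on the imports added at its top; the sample and statistics are illustrative only:

# Nonparametric (balanced) bootstrap of the sample mean.
sample = logistic.rvs(loc=10, scale=5, size=200, random_state=0)
boot_means = bootstrap(sample, f=np.mean, b=500, random_state=1)
print(boot_means.mean(), boot_means.std())

# Parametric bootstrap, assuming a logistic family for the data.
boot_medians = bootstrap(sample, f=np.median, b=500,
                         method="parametric", family="logistic",
                         random_state=1)
print(boot_medians.mean(), boot_medians.std())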
from scipy.stats import logistic
import matplotlib.pyplot as plt
import numpy as np

fig, ax = plt.subplots(1, 1)

# Display the probability density function (``pdf``):

x = np.linspace(logistic.ppf(0.01), logistic.ppf(0.99), 100)
ax.plot(x, logistic.pdf(x), 'r-', lw=5, alpha=0.6, label='logistic pdf')

# Alternatively, the distribution object can be called (as a function)
# to fix the shape, location and scale parameters. This returns a "frozen"
# RV object holding the given parameters fixed.

# Freeze the distribution and display the frozen ``pdf``:

rv = logistic()
ax.plot(x, rv.pdf(x), 'k-', lw=2, label='frozen pdf')

# Check accuracy of ``cdf`` and ``ppf``:

vals = logistic.ppf([0.001, 0.5, 0.999])
np.allclose([0.001, 0.5, 0.999], logistic.cdf(vals))
# True

# Generate random numbers:

r = logistic.rvs(size=1000)

# And compare the histogram:

ax.hist(r, density=True, histtype='stepfilled', alpha=0.2)
ax.legend(loc='best', frameon=False)
plt.show()
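
As a follow-up sketch (not part of the original example), the generated sample can be passed back to `logistic.fit` to recover location and scale estimates:

loc_hat, scale_hat = logistic.fit(r)
print(loc_hat, scale_hat)  # should be close to 0 and 1 for the standard logistic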