Example #1
def draw_lambdat(alpha0, beta0, Fs, K):
    ''' Draw new values for lambda (ARD) parameters. '''
    new_lambdas = numpy.empty(K)
    for k in range(0, K):
        alpha, beta = updates.alpha_beta_lambdat(alpha0=alpha0,
                                                 beta0=beta0,
                                                 Fs=Fs,
                                                 k=k)
        new_lambdas[k] = gamma_draw(alpha, beta)
    return new_lambdas
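Each of these samplers delegates the actual draw to a `gamma_draw(alpha, beta)` helper that is not shown on this page. A minimal sketch of what such a helper might look like, assuming a shape/rate parameterisation (note that `numpy.random.gamma` expects a scale, i.e. 1/rate):

import numpy

def gamma_draw(alpha, beta):
    ''' Hypothetical helper: one draw from Gamma(shape=alpha, rate=beta). '''
    return numpy.random.gamma(shape=alpha, scale=1.0 / beta)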
Example #2
def draw_lambdaS(alphaS, betaS, S, nonnegative):
    ''' Draw new values for lambda (ARD) parameters. '''
    (K, L) = S.shape
    new_lambdaS = numpy.empty((K, L))
    alpha, beta = updates.alpha_beta_lambdaS(alphaS=alphaS,
                                             betaS=betaS,
                                             S=S,
                                             nonnegative=nonnegative)
    for k, l in itertools.product(range(0, K), range(0, L)):
        new_lambdaS[k, l] = gamma_draw(alpha[k, l], beta[k, l])
    return new_lambdaS
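If `gamma_draw` is just a thin wrapper around `numpy.random.gamma` (an assumption; see the sketch after Example #1), the (k, l) loop in `draw_lambdaS` could be replaced by a single vectorised draw, since `numpy.random.gamma` broadcasts array-valued shape and scale arguments:

# Hypothetical vectorised alternative to the (k, l) loop inside draw_lambdaS,
# using the (K, L) arrays alpha and beta returned by updates.alpha_beta_lambdaS.
new_lambdaS = numpy.random.gamma(shape=alpha, scale=1.0 / beta)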
Example #3
def init_lambdaS(init, K, L, alphaS, betaS):
    ''' Initialise the lambda^n_kl or lambda^m_kl parameters using the model definition. Init in ['random','exp']. '''
    options = ['random', 'exp']
    assert init in options, "Unknown initialisation option for element-wise sparsity lambda^S: %s. Options are %s." % (
        init, options)

    lambdaS = numpy.zeros((K, L))
    for k, l in itertools.product(range(0, K), range(0, L)):
        lambdaS[k, l] = gamma_draw(
            alphaS, betaS) if init == 'random' else alphaS / float(betaS)
    return lambdaS
Example #4
def init_lambdak(init, K, alpha0, beta0):
    ''' Initialise the lambdak parameters using the model definition. Init in ['random','exp']. '''
    options = ['random', 'exp']
    assert init in options, "Unknown initialisation option for lambdak: %s. Options are %s." % (
        init, options)

    lambdak = numpy.zeros(K)
    for k in range(0, K):
        lambdak[k] = gamma_draw(
            alpha0, beta0) if init == 'random' else alpha0 / float(beta0)
    return lambdak
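A small usage sketch for the two initialisers, with purely illustrative hyperparameter values: 'exp' sets every entry to the prior mean (alphaS / betaS or alpha0 / beta0), while 'random' draws each entry from the prior via `gamma_draw`.

# Illustrative values only; real hyperparameters come from the model setup.
lambdaS = init_lambdaS(init='exp', K=4, L=3, alphaS=1.0, betaS=1.0)  # every entry equals 1.0
lambdak = init_lambdak(init='random', K=4, alpha0=1.0, beta0=1.0)    # one Gamma(1, 1) draw per factor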
Example #5
def draw_importance(alphaA, betaA, tau, dataset, mask, F, G, S=None):
    ''' Draw new values for alpha (dataset importance) parameter. '''
    alpha, beta = updates.alpha_beta_importance(alphaA=alphaA,
                                                betaA=betaA,
                                                tau=tau,
                                                dataset=dataset,
                                                mask=mask,
                                                F=F,
                                                G=G,
                                                S=S)
    importance = gamma_draw(alpha, beta)
    return importance
Example #6
def draw_tau(alphatau, betatau, importance, dataset, mask, F, G, S=None):
    ''' Draw new values for tau (noise) parameter. '''
    alpha, beta = updates.alpha_beta_tau(alphatau=alphatau,
                                         betatau=betatau,
                                         importance=importance,
                                         dataset=dataset,
                                         mask=mask,
                                         F=F,
                                         G=G,
                                         S=S)
    tau = gamma_draw(alpha, beta)
    return tau
Example #7
def init_tau(init, alphatau, betatau, importance, R, M, F, G, S=None):
    ''' Initialise the tau parameter using the model definition. Init in ['random','exp']. '''
    options = ['random', 'exp']
    assert init in options, "Unknown initialisation option for tau: %s. Options are %s." % (
        init, options)

    alpha, beta = updates_Gibbs.alpha_beta_tau(alphatau=alphatau,
                                               betatau=betatau,
                                               importance=importance,
                                               dataset=R,
                                               mask=M,
                                               F=F,
                                               G=G,
                                               S=S)

    return gamma_draw(alpha, beta) if init == 'random' else alpha / float(beta)
Example #8
    def run(self, iterations):
        self.all_F = numpy.zeros((iterations, self.I, self.K))
        self.all_S = numpy.zeros((iterations, self.K, self.L))
        self.all_G = numpy.zeros((iterations, self.J, self.L))
        self.all_tau = numpy.zeros(iterations)
        self.all_times = []  # to plot performance against time

        metrics = ['MSE', 'R^2', 'Rp']
        self.all_performances = {}  # for plotting convergence of metrics
        for metric in metrics:
            self.all_performances[metric] = []

        time_start = time.time()
        for it in range(0, iterations):
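            # One Gibbs sweep: resample the columns of F, the entries of S,
            # the columns of G, and finally the noise precision tau.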
            for k in range(0, self.K):
                tauFk = self.tauF(k)
                muFk = self.muF(tauFk, k)
                self.F[:, k] = TN_vector_draw(muFk, tauFk)

            for k, l in itertools.product(range(0, self.K), range(0, self.L)):
                tauSkl = self.tauS(k, l)
                muSkl = self.muS(tauSkl, k, l)
                self.S[k, l] = TN_draw(muSkl, tauSkl)

            for l in range(0, self.L):
                tauGl = self.tauG(l)
                muGl = self.muG(tauGl, l)
                self.G[:, l] = TN_vector_draw(muGl, tauGl)

            self.tau = gamma_draw(self.alpha_s(), self.beta_s())

            self.all_F[it] = numpy.copy(self.F)
            self.all_S[it] = numpy.copy(self.S)
            self.all_G[it] = numpy.copy(self.G)
            self.all_tau[it] = self.tau

            perf = self.predict_while_running()
            for metric in metrics:
                self.all_performances[metric].append(perf[metric])

            print "Iteration %s. MSE: %s. R^2: %s. Rp: %s." % (
                it + 1, perf['MSE'], perf['R^2'], perf['Rp'])

            time_iteration = time.time()
            self.all_times.append(time_iteration - time_start)

        return (self.all_F, self.all_S, self.all_G, self.all_tau)
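The `TN_draw` and `TN_vector_draw` helpers used above are also not shown on this page. A minimal sketch of what they might look like, assuming a normal with mean mu and precision tau truncated to [0, inf) and built on `scipy.stats.truncnorm`:

import numpy
from scipy.stats import truncnorm

def TN_draw(mu, tau):
    ''' Hypothetical helper: one draw from N(mu, 1/tau) truncated to [0, inf). '''
    sigma = 1.0 / numpy.sqrt(tau)
    a = (0.0 - mu) / sigma  # lower truncation bound in standardised units
    return truncnorm.rvs(a, numpy.inf, loc=mu, scale=sigma)

def TN_vector_draw(mus, taus):
    ''' Hypothetical helper: element-wise truncated-normal draws for a whole column. '''
    return numpy.array([TN_draw(mu, tau) for mu, tau in zip(mus, taus)])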
Example #9
    def run(self, iterations):
        self.all_U = numpy.zeros((iterations, self.I, self.K))
        self.all_V = numpy.zeros((iterations, self.J, self.K))
        self.all_tau = numpy.zeros(iterations)
        self.all_times = []  # to plot performance against time

        metrics = ['MSE', 'R^2', 'Rp']
        self.all_performances = {}  # for plotting convergence of metrics
        for metric in metrics:
            self.all_performances[metric] = []

        time_start = time.time()
        for it in range(0, iterations):
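            # One Gibbs sweep: resample the columns of U and V, then the noise precision tau.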
            for k in range(0, self.K):
                tauUk = self.tauU(k)
                muUk = self.muU(tauUk, k)
                self.U[:, k] = TN_vector_draw(muUk, tauUk)

            for k in range(0, self.K):
                tauVk = self.tauV(k)
                muVk = self.muV(tauVk, k)
                self.V[:, k] = TN_vector_draw(muVk, tauVk)

            self.tau = gamma_draw(self.alpha_s(), self.beta_s())

            self.all_U[it] = numpy.copy(self.U)
            self.all_V[it] = numpy.copy(self.V)
            self.all_tau[it] = self.tau

            perf = self.predict_while_running()
            for metric in metrics:
                self.all_performances[metric].append(perf[metric])

            print "Iteration %s. MSE: %s. R^2: %s. Rp: %s." % (
                it + 1, perf['MSE'], perf['R^2'], perf['Rp'])

            time_iteration = time.time()
            self.all_times.append(time_iteration - time_start)

        return (self.all_U, self.all_V, self.all_tau)
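Both run methods record per-iteration wall-clock times in all_times and per-iteration metrics in all_performances, so convergence can be inspected afterwards. A short plotting sketch, assuming matplotlib is available and sampler is an already-constructed instance of one of the classes above:

import matplotlib.pyplot as plt

all_U, all_V, all_tau = sampler.run(iterations=200)
plt.plot(sampler.all_times, sampler.all_performances['MSE'])
plt.xlabel('seconds since start of run')
plt.ylabel('MSE')
plt.show()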