Example #1
import numpy as np

import common_functions as cf  # assumed alias: project module providing setup_and_train
from common_functions import iter_from_X_lengths


def train_HMM(n_components,
              observations,
              lengths,
              labels,
              verbose=False,
              cov_type='spherical',
              iter=500):
    AD_idx = np.where(labels == 1)
    AD_lengths = lengths[AD_idx[0]]

    nonAD_idx = np.where(labels != 1)
    nonAD_lengths = lengths[nonAD_idx[0]]

    # Walk the stacked observation matrix sequence by sequence and route each
    # sequence's rows to the pool matching its label (1 = AD, otherwise non-AD).
    label_iter = np.nditer(labels)
    nonAD_observations = np.zeros((0, observations.shape[1]))
    AD_observations = np.zeros((0, observations.shape[1]))
    for s, e in iter_from_X_lengths(observations, lengths):
        temp = observations[s:e, :]
        if next(label_iter) == 0:
            nonAD_observations = np.vstack((nonAD_observations, temp))
        else:
            AD_observations = np.vstack((AD_observations, temp))

    AD_hmm = cf.setup_and_train(n_components, AD_observations, AD_lengths,
                                cov_type, verbose, iter)

    nonAD_hmm = cf.setup_and_train(n_components, nonAD_observations,
                                   nonAD_lengths, cov_type, verbose, iter)

    return AD_hmm, nonAD_hmm
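The splitting loop above depends on iter_from_X_lengths from the project's common_functions module. A minimal sketch of such a helper, assuming it mirrors the old hmmlearn.utils utility of the same name (one (start, end) row slice per sequence; the code below is illustrative, not the project's actual implementation):

import numpy as np

def iter_from_X_lengths(X, lengths):
    # Yield (start, end) index pairs that slice X into its sequences.
    if lengths is None:
        yield 0, len(X)
    else:
        ends = np.cumsum(lengths).astype(np.int64)
        if ends[-1] > X.shape[0]:
            raise ValueError("lengths sum to more rows than X contains")
        starts = ends - np.asarray(lengths)
        for start, end in zip(starts, ends):
            yield start, end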
Example #2
from common_functions import iter_from_X_lengths


def test_HMM(ad_hmm, non_ad_hmm, observations, lengths):
    """ Test observation probabilities for both HMMs and decide on label """
    predicted = []
    for start, end in iter_from_X_lengths(observations, lengths):
        obs = observations[start:end, :]
        # Log-likelihood of the sequence under each class-specific HMM;
        # the higher-scoring model decides the label.
        ad_score = ad_hmm.score(obs)
        non_ad_score = non_ad_hmm.score(obs)

        if ad_score > non_ad_score:
            predicted.append(1)
        else:
            predicted.append(0)

    return predicted
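A hedged end-to-end sketch of how the two functions above could be chained. The arrays are toy data invented for illustration, and the project-specific helpers (common_functions, cf.setup_and_train) are assumed to be importable:

import numpy as np

np.random.seed(0)
lengths = np.array([5, 4, 6, 3])                      # four short sequences
labels = np.array([1, 0, 1, 0])                       # per-sequence AD / non-AD labels
observations = np.random.rand(int(lengths.sum()), 2)  # stacked 2-D features

ad_hmm, non_ad_hmm = train_HMM(2, observations, lengths, labels)
predicted = test_HMM(ad_hmm, non_ad_hmm, observations, lengths)
print(np.mean(np.array(predicted) == labels))         # fraction of sequences labelled correctly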
Example #3
from hmmlearn import hmm
import numpy as np
from common_functions import iter_from_X_lengths
from common_functions import random_ints_with_sum
np.random.seed(42)

# Hand-specify a 3-state HMM with 2-D Gaussian emissions to generate data from.
model = hmm.GaussianHMM(n_components=3, covariance_type="full")
model.startprob_ = np.array([0.6, 0.3, 0.1])
model.transmat_ = np.array([[0.7, 0.2, 0.1],
                            [0.3, 0.5, 0.2],
                            [0.3, 0.3, 0.4]])

model.means_ = np.array([[0.0, 0.0], [3.0, -3.0], [5.0, 10.0]])
model.covars_ = np.tile(np.identity(2), (3, 1, 1))

for i in np.arange(1):
    # Sample a long training sequence from the hand-specified model, then re-fit.
    obs, _ = model.sample(10000)
    model.fit(obs)

    # Draw a short evaluation sample and split it into random-length sub-sequences.
    N = 100

    obs, _ = model.sample(N)

    lengths = list(random_ints_with_sum(N))

    # Scoring a slice directly and scoring it with an explicit lengths argument
    # should give the same log-likelihood.
    for s, e in iter_from_X_lengths(obs, lengths):
        observation = obs[s:e, :]
        logprob = model.score(observation)
        logprob1 = model.score(observation, [observation.shape[0]])
        print(logprob, logprob1)
        print(observation.shape[0], e - s)
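Example #3 also relies on random_ints_with_sum from common_functions. Its behaviour is only inferred from how it is used above (positive sequence lengths that sum to N); a hypothetical sketch under that assumption:

import numpy as np

def random_ints_with_sum(n):
    # Yield random positive integers that sum exactly to n.
    remaining = n
    while remaining > 0:
        k = np.random.randint(1, remaining + 1)
        yield k
        remaining -= k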
Example #4
import numpy as np
from sklearn.model_selection import StratifiedKFold
import train_HMM  # project module; provides make_data() used below
from common_functions import iter_from_X_lengths

fpr_3 = {}
tpr_3 = {}
# Also initialise the per-fold containers filled below (assumed; this fragment
# uses them without defining them).
sensitivity_3 = {}
specificity_3 = {}
CM_3 = {}

for f in SVM_folds:
    sensitivity_3[f] = [0] * (n_components[-1] + 1)
    specificity_3[f] = [0] * (n_components[-1] + 1)
    CM_3[f] = {}
    for n_comp in n_components:
        CM_3[f][n_comp] = np.zeros((2, 2))
""" Make train/test data """
train_obs_init, train_len_init, train_labels_init, MCI_obs, MCI_len, MCI_labels = \
    train_HMM.make_data()

starts = []
ends = []
for s, e in iter_from_X_lengths(MCI_obs, MCI_len):
    starts.append(s)
    ends.append(e)

if folds != 1:
    # Modern sklearn API; materialise the folds so they can be reused on every
    # outer iteration below.
    skf = list(StratifiedKFold(n_splits=3).split(np.zeros(len(MCI_labels)), MCI_labels))
else:
    skf = [(-1, -1)]

for iteration in np.arange(iterations):
    f = 0
    for train, test in skf:
        if np.any(train == -1):
            # Sentinel fold (folds == 1): use the full initial training set.
            train_len = train_len_init
            train_labels = train_labels_init
            train_obs = train_obs_init.copy()