1],
                                            alpha_0=5.,
                                            beta_0=5.)
    for state in range(library_size)
]

# Fit an HSMM (negative-binomial integer-duration variant) to the training data.
hsmm = LibraryHSMMIntNegBinVariant(
    init_state_concentration=10.,
    alpha=6.,
    gamma=2.,
    obs_distns=obs_distns,
    dur_distns=dur_distns,
)

# left_censoring=True: a sequence may begin partway through a segment
for seq in training_datas:
    hsmm.add_data(seq, left_censoring=True)

# Gibbs sampling sweeps
for _ in progprint_xrange(resample_iter):
    hsmm.resample_model()

### degrade into HMM, use the same learned syllables!

# Build an HMM that reuses the HSMM's learned observation distributions
# (the "syllables"); only the remaining HMM parameters are resampled.
hmm = LibraryHMMFixedObs(
    init_state_concentration=10.,
    alpha=6.,
    gamma=2.,
    obs_distns=hsmm.obs_distns,
)

for seq in training_datas:
    hmm.add_data(seq)

for _ in progprint_xrange(resample_iter):
    hmm.resample_model()

### degrade into GMM, use the same learned syllables!
### Example 2
# Construct the HSMM for this example run.
model = LibraryHSMMIntNegBinVariant(
    init_state_concentration=10.,
    alpha=6.,
    gamma=6.,
    obs_distns=obs_distns,
    dur_distns=dur_distns,
)

# left_censoring=True: sequences may start mid-segment
for seq in training_datas:
    model.add_data(seq, left_censoring=True)
    # model.add_data_parallel(seq, left_censoring=True)

##################
#  infer things  #
##################

# 25 Gibbs sampling sweeps
for _ in progprint_xrange(25):
    model.resample_model()

#################
#  check likes  #
#################

# Held-out log likelihood as reported directly by the model.
computed_directly = model.log_likelihood(test_data, left_censoring=True)

# NOTE: this is like model.predictive_likelihoods(test_data,[1]) but it includes
# the first frame p(y_1) term instead of just starting at p(y_2|y_1)
dummy_stateseq = np.zeros(len(test_data))  # placeholder state sequence for the constructor
s = model._states_class(
    model=model,
    data=test_data,
    stateseq=dummy_stateseq,
    left_censoring=True,
)
alphal = s.messages_forwards()
# per-frame max over states, used to stabilize the log-sum-exp
cmaxes = alphal.max(axis=1)
        for state in range(library_size)]

# Second fit: same HSMM family, tracking likelihoods during sampling.
model = LibraryHSMMIntNegBinVariant(
    init_state_concentration=10.,
    alpha=6.,
    gamma=6.,
    obs_distns=obs_distns,
    dur_distns=dur_distns,
)

for seq in training_datas:
    model.add_data(seq, left_censoring=True)
    # model.add_data_parallel(seq, left_censoring=True)

##################
#  infer things  #
##################

# training (and optionally test) log likelihoods per sweep
train_likes = []
test_likes = []

for _ in progprint_xrange(5):
    model.resample_model()
    # model.resample_model_parallel()
    train_likes.append(model.log_likelihood())
    # test_likes.append(model.log_likelihood(test_data, left_censoring=True))

# Unfreeze the fitted model and continue resampling the result.
newmodel = model.unfreeze()

for _ in progprint_xrange(5):
    newmodel.resample_model()

### HSMM

# One duration distribution per library state. The np.r_ vector is a weight
# mask over candidate r values — presumably only the last four are allowed;
# TODO(review): confirm the indexing convention against the class definition.
dur_distns = [
    NegativeBinomialIntegerRVariantDuration(
        np.r_[0., 0, 0, 0, 0, 0, 1, 1, 1, 1],
        alpha_0=5.,
        beta_0=5.)
    for _ in range(library_size)
]

# Fit the HSMM on all training sequences (left-censored).
hsmm = LibraryHSMMIntNegBinVariant(
    init_state_concentration=10.,
    alpha=6.,
    gamma=2.,
    obs_distns=obs_distns,
    dur_distns=dur_distns,
)
for seq in training_datas:
    hsmm.add_data(seq, left_censoring=True)

for _ in progprint_xrange(resample_iter):
    hsmm.resample_model()

### degrade into HMM, use the same learned syllables!

# HMM that shares the HSMM's learned observation distributions, so the
# same syllables are used while duration modeling is dropped.
hmm = LibraryHMMFixedObs(
    init_state_concentration=10.,
    alpha=6.,
    gamma=2.,
    obs_distns=hsmm.obs_distns,
)
for seq in training_datas:
    hmm.add_data(seq)

for _ in progprint_xrange(resample_iter):
    hmm.resample_model()

### degrade into GMM, use the same learned syllables!