# Fit an HSMM by Gibbs sampling, then fit an HMM that reuses the HSMM's
# learned observation distributions, so the two models differ only in
# their duration modeling.
dur_distns = [
    NegativeBinomialIntegerRVariantDuration(
        np.r_[0., 0, 0, 0, 0, 0, 1, 1, 1, 1],
        alpha_0=5., beta_0=5.)
    for state in range(library_size)]

hsmm = LibraryHSMMIntNegBinVariant(
    init_state_concentration=10.,
    alpha=6., gamma=2.,
    obs_distns=obs_distns,
    dur_distns=dur_distns)

for data in training_datas:
    hsmm.add_data(data, left_censoring=True)
for itr in progprint_xrange(resample_iter):
    hsmm.resample_model()

### degrade into HMM, use the same learned syllables!
hmm = LibraryHMMFixedObs(
    init_state_concentration=10.,
    alpha=6., gamma=2.,
    obs_distns=hsmm.obs_distns)

for data in training_datas:
    hmm.add_data(data)
for itr in progprint_xrange(resample_iter):
    hmm.resample_model()
################
#  build HSMM  #
################

dur_distns = [
    NegativeBinomialIntegerRVariantDuration(
        np.r_[0., 0, 0, 0, 0, 1, 1, 1],
        alpha_0=5., beta_0=5.)
    for state in range(library_size)]

model = LibraryHSMMIntNegBinVariant(
    init_state_concentration=10.,
    alpha=6., gamma=6.,
    obs_distns=obs_distns,
    dur_distns=dur_distns)

for data in training_datas:
    model.add_data(data, left_censoring=True)
    # model.add_data_parallel(data,left_censoring=True)

##################
#  infer things  #
##################

# track per-iteration log likelihoods while resampling
train_likes = []
test_likes = []
for i in progprint_xrange(5):
    model.resample_model()
    # model.resample_model_parallel()
    train_likes.append(model.log_likelihood())
    # test_likes.append(model.log_likelihood(test_data,left_censoring=True))
dur_distns = [ NegativeBinomialIntegerRVariantDuration(np.r_[0., 0, 0, 1, 1, 1, 1, 1], alpha_0=5., beta_0=5.) for state in range(library_size) ] model = LibraryHSMMIntNegBinVariant(init_state_concentration=10., alpha=6., gamma=6., obs_distns=obs_distns, dur_distns=dur_distns) ##################### # add_data timing # ##################### print 'this one should be slower!' tic = time.time() model.add_data(data) toc = time.time() print '...done in %f seconds' % (toc - tic) print '' print 'this one sholud be faster!' tic = time.time() model.add_data(data) toc = time.time() print '...done in %f seconds' % (toc - tic) print ''
for row in init_weights] ################ # build HSMM # ################ dur_distns = [NegativeBinomialIntegerRVariantDuration(np.r_[0.,0,0,1,1,1,1,1],alpha_0=5.,beta_0=5.) for state in range(library_size)] model = LibraryHSMMIntNegBinVariant( init_state_concentration=10., alpha=6.,gamma=6., obs_distns=obs_distns, dur_distns=dur_distns) model.add_data(data) ################## # infer things # ################## for i in progprint_xrange(50): model.resample_model() plt.figure() truemodel.plot() plt.gcf().suptitle('truth') plt.figure() model.plot() plt.gcf().suptitle('inferred')
# Build and resample an HSMM, then evaluate the test-set log likelihood
# directly as a sanity check.
dur_distns = [
    NegativeBinomialIntegerRVariantDuration(
        np.r_[0., 0, 0, 0, 0, 1, 1, 1],
        alpha_0=5., beta_0=5.)
    for state in range(library_size)]

model = LibraryHSMMIntNegBinVariant(
    init_state_concentration=10.,
    alpha=6., gamma=6.,
    obs_distns=obs_distns,
    dur_distns=dur_distns)

for data in training_datas:
    model.add_data(data, left_censoring=True)
    # model.add_data_parallel(data,left_censoring=True)

##################
#  infer things  #
##################

for i in progprint_xrange(25):
    model.resample_model()

#################
#  check likes  #
#################

computed_directly = model.log_likelihood(test_data, left_censoring=True)
################

# keep the fitted models in insertion order for later comparison
models = collections.OrderedDict()

### HSMM
dur_distns = [
    NegativeBinomialIntegerRVariantDuration(
        np.r_[0., 0, 0, 0, 0, 0, 1, 1, 1, 1],
        alpha_0=5., beta_0=5.)
    for state in range(library_size)]

hsmm = LibraryHSMMIntNegBinVariant(
    init_state_concentration=10.,
    alpha=6., gamma=2.,
    obs_distns=obs_distns,
    dur_distns=dur_distns)

for data in training_datas:
    hsmm.add_data(data, left_censoring=True)
for itr in progprint_xrange(resample_iter):
    hsmm.resample_model()

### degrade into HMM, use the same learned syllables!
hmm = LibraryHMMFixedObs(
    init_state_concentration=10.,
    alpha=6., gamma=2.,
    obs_distns=hsmm.obs_distns)

for data in training_datas:
    hmm.add_data(data)
for itr in progprint_xrange(resample_iter):
    hmm.resample_model()