def _test11():
    # A reduced version of test10
    ret = ""

    # GmmMgr setup: one dummy acoustic model per state, dimension 2.
    num_states = 2
    dimension = 2
    models = []
    for i in xrange(num_states):
        dm = DummyModel(dimension, 1.0)
        models.append(dm)
    gmm_mgr = GmmMgr(models)

    # Build the lattice: start node 0, end node 4, HMM nodes 1-3 in between.
    gb = GraphBuilder()
    node_id0 = gb.new_node((0, 0))
    node_id1 = gb.new_node((1, 1))
    node_id2 = gb.new_node((2, 1))
    node_id3 = gb.new_node((3, 1))
    node_id4 = gb.new_node((4, 2))

    # The topology here is slightly more complex than the previous example
    arc_id = gb.new_arc(node_id0, node_id1)
    arc_id = gb.new_arc(node_id1, node_id4)
    arc_id = gb.new_arc(node_id0, node_id2)
    arc_id = gb.new_arc(node_id2, node_id3)
    arc_id = gb.new_arc(node_id3, node_id4)
    arc_id = gb.new_arc(node_id2, node_id4)
    gr0 = FrozenGraph(gb)

    # Make two forward Hmms of order 2 (self loop, forward 1).
    # The model in the middle (hmm1) is special and can skip.
    seed(0)
    hmm0 = make_forward_hmm(gmm_mgr, num_states, order=2, exact=False)
    hmm1 = Hmm(1)
    trans = array(((0.0, 0.5, 0.5),
                   (0.0, 0.5, 0.5),
                   (0.0, 0.0, 0.0)))
    hmm1.build_model(gmm_mgr, (0,), 1, 1, trans)
    hmm2 = make_forward_hmm(gmm_mgr, num_states, order=2, exact=True)
    hmm_mgr = HmmMgr((hmm0, hmm1, hmm2))

    # Split probabilities for the out-arcs of the two branching nodes.
    spd = {}
    spd[(0, 1)] = (0.4,)
    spd[(0, 2)] = (0.6,)
    spd[(2, 3)] = (0.4,)
    spd[(2, 4)] = (0.6,)

    tg0 = TrainingGraph(gr0, hmm_mgr, split_prob_dict=spd)

    if do_display:
        tg0.dot_display()
        tg0.dot_display(expand_hmms=True)

    with DebugPrint("bwt_ctsh") if True else DebugPrint():
        result_hmm = tg0.convert_to_standalone_hmm()
    ret += "\n\n========= TG CONVERTED TO Hmm =========\n\n" + result_hmm.to_string(full=True)
    return ret
def _test11():
    """A reduced version of test10."""
    out = ""

    # GmmMgr setup: one dummy model per state, dimension 2.
    num_states = 2
    dimension = 2
    gmm_mgr = GmmMgr([DummyModel(dimension, 1.0) for _ in xrange(num_states)])

    # Lattice with a start (label 0), an end (label 2), and HMM nodes (label 1).
    builder = GraphBuilder()
    n0 = builder.new_node((0, 0))
    n1 = builder.new_node((1, 1))
    n2 = builder.new_node((2, 1))
    n3 = builder.new_node((3, 1))
    n4 = builder.new_node((4, 2))
    # Slightly richer topology than the previous example: two branch points.
    for src, dst in ((n0, n1), (n1, n4), (n0, n2), (n2, n3), (n3, n4), (n2, n4)):
        builder.new_arc(src, dst)
    graph = FrozenGraph(builder)

    # Two forward Hmms of order 2 (self loop, forward 1); the middle
    # model can skip straight through.
    seed(0)
    hmm0 = make_forward_hmm(gmm_mgr, num_states, order=2, exact=False)
    skip_hmm = Hmm(1)
    trans = array(((0.0, 0.5, 0.5),
                   (0.0, 0.5, 0.5),
                   (0.0, 0.0, 0.0)))
    skip_hmm.build_model(gmm_mgr, (0,), 1, 1, trans)
    hmm2 = make_forward_hmm(gmm_mgr, num_states, order=2, exact=True)
    hmm_mgr = HmmMgr((hmm0, skip_hmm, hmm2))

    # Out-arc split probabilities for the two branching nodes.
    split_probs = {(0, 1): (0.4,),
                   (0, 2): (0.6,),
                   (2, 3): (0.4,),
                   (2, 4): (0.6,)}
    tg0 = TrainingGraph(graph, hmm_mgr, split_prob_dict=split_probs)

    if do_display:
        tg0.dot_display()
        tg0.dot_display(expand_hmms=True)

    with DebugPrint("bwt_ctsh") if True else DebugPrint():
        standalone = tg0.convert_to_standalone_hmm()
    out += "\n\n========= TG CONVERTED TO Hmm =========\n\n" + standalone.to_string(full=True)
    return out
def _test9():
    # Like test8, but now HMMs have multiple inputs and outputs.
    ret = ""

    # GmmMgr setup: one dummy model per state, dimension 2.
    num_states = 3
    dimension = 2
    models = []
    for i in xrange(num_states):
        dm = DummyModel(dimension, 1.0)
        models.append(dm)
    gmm_mgr = GmmMgr(models)

    # A simple linear chain: start -> four HMM nodes -> end.
    gb = GraphBuilder()
    node_id0 = gb.new_node((0, 0))
    node_id1 = gb.new_node((1, 1))
    node_id2 = gb.new_node((2, 1))
    node_id3 = gb.new_node((3, 1))
    node_id4 = gb.new_node((4, 1))
    node_id5 = gb.new_node((5, 2))
    arc_id = gb.new_arc(node_id0, node_id1)
    arc_id = gb.new_arc(node_id1, node_id2)
    arc_id = gb.new_arc(node_id2, node_id3)
    arc_id = gb.new_arc(node_id3, node_id4)
    arc_id = gb.new_arc(node_id4, node_id5)
    gr0 = FrozenGraph(gb)

    # Make two Hmms with 3 states and order 3 (self loop, forward 1, forward 2).
    # The model in the middle (hmm1) is special and can skip directly.
    seed(0)
    hmm0 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    hmm1 = Hmm(1)
    trans = array(((0.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.5, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5),
                   (0.0, 0.0, 0.0, 0.5, 0.35, 0.1, 0.05),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)))
    hmm1.build_model(gmm_mgr, (0,), 3, 3, trans)
    hmm2 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    hmm_mgr = HmmMgr((hmm0, hmm1, hmm2))

    # NOTE(review): DebugPrint gating is a manual toggle (currently off);
    # it only affects debug output, not the returned string.
    with DebugPrint("bwt_vrfy") if False else DebugPrint():
        tg0 = TrainingGraph(gr0, hmm_mgr, split_prob_dict=dict())

    result_hmm = tg0.convert_to_standalone_hmm()
    ret += "\n\n========= TG CONVERTED TO Hmm =========\n\n" + result_hmm.to_string(full=True)
    return ret
def _test9():
    """Like test8, but now HMMs have multiple inputs and outputs."""
    out = ""

    # GmmMgr setup: one dummy model per state, dimension 2.
    num_states = 3
    dimension = 2
    gmm_mgr = GmmMgr([DummyModel(dimension, 1.0) for _ in xrange(num_states)])

    # Linear chain of nodes: start (0), four HMM nodes (1), end (2).
    builder = GraphBuilder()
    nodes = [builder.new_node((i, label))
             for i, label in enumerate((0, 1, 1, 1, 1, 2))]
    for left, right in zip(nodes[:-1], nodes[1:]):
        builder.new_arc(left, right)
    graph = FrozenGraph(builder)

    # Two order-3 forward Hmms (self loop, forward 1, forward 2); the
    # middle model is special and can skip directly.
    seed(0)
    hmm0 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    skip_hmm = Hmm(1)
    trans = array(((0.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.5, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5),
                   (0.0, 0.0, 0.0, 0.5, 0.35, 0.1, 0.05),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)))
    skip_hmm.build_model(gmm_mgr, (0,), 3, 3, trans)
    hmm2 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    hmm_mgr = HmmMgr((hmm0, skip_hmm, hmm2))

    # Debug toggle is off here; the context manager is a no-op either way
    # as far as the returned string is concerned.
    with DebugPrint("bwt_vrfy") if False else DebugPrint():
        tg = TrainingGraph(graph, hmm_mgr, split_prob_dict=dict())

    standalone = tg.convert_to_standalone_hmm()
    out += "\n\n========= TG CONVERTED TO Hmm =========\n\n" + standalone.to_string(full=True)
    return out
def _test10():
    # Like test9, but now HMMs are arranged in a diamond pattern so inter-HMM
    # probabilities come into play
    ret = ""

    # GmmMgr setup: one dummy model per state, dimension 2.
    num_states = 3
    dimension = 2
    models = []
    for i in xrange(num_states):
        dm = DummyModel(dimension, 1.0)
        models.append(dm)
    gmm_mgr = GmmMgr(models)

    gb = GraphBuilder()
    node_id0 = gb.new_node((0, 0))
    node_id1 = gb.new_node((1, 1))
    node_id2 = gb.new_node((2, 1))
    node_id3 = gb.new_node((3, 1))
    node_id4 = gb.new_node((4, 1))
    node_id5 = gb.new_node((5, 2))

    # The topology here is more complex than previous examples
    arc_id = gb.new_arc(node_id0, node_id1)
    arc_id = gb.new_arc(node_id1, node_id5)
    arc_id = gb.new_arc(node_id0, node_id2)
    arc_id = gb.new_arc(node_id2, node_id3)
    arc_id = gb.new_arc(node_id3, node_id4)
    arc_id = gb.new_arc(node_id3, node_id5)
    arc_id = gb.new_arc(node_id4, node_id5)
    gr0 = FrozenGraph(gb)

    # Make two Hmms with 3 states and order 3 (self loop, forward 1, forward 2).
    # The model in the middle (hmm1) is special and can skip.
    seed(0)
    hmm0 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    hmm1 = Hmm(1)
    trans = array(((0.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.5, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5),
                   (0.0, 0.0, 0.0, 0.5, 0.35, 0.1, 0.05),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)))
    hmm1.build_model(gmm_mgr, (0,), 3, 3, trans)
    hmm2 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    hmm_mgr = HmmMgr((hmm0, hmm1, hmm2))

    # Per-input split probabilities for the out-arcs of the branching nodes;
    # each HMM here has 3 inputs, hence the 3-tuples.
    spd = {}
    spd[(0, 1)] = (0.4, 0.3, 0.8)
    spd[(0, 2)] = (0.6, 0.7, 0.2)
    spd[(3, 4)] = (0.4, 0.3, 0.8)
    spd[(3, 5)] = (0.6, 0.7, 0.2)

    tg0 = TrainingGraph(gr0, hmm_mgr, split_prob_dict=spd)

    with DebugPrint("bwt_ctsh") if True else DebugPrint():
        result_hmm = tg0.convert_to_standalone_hmm()
    ret += "\n\n========= TG CONVERTED TO Hmm =========\n\n" + result_hmm.to_string(full=True)
    return ret
def _test10():
    """Like test9, but the HMMs form a diamond so inter-HMM probabilities
    come into play."""
    out = ""

    # GmmMgr setup: one dummy model per state, dimension 2.
    num_states = 3
    dimension = 2
    gmm_mgr = GmmMgr([DummyModel(dimension, 1.0) for _ in xrange(num_states)])

    # Diamond-shaped lattice: start (0), end (5), HMM nodes 1-4.
    builder = GraphBuilder()
    n0 = builder.new_node((0, 0))
    n1 = builder.new_node((1, 1))
    n2 = builder.new_node((2, 1))
    n3 = builder.new_node((3, 1))
    n4 = builder.new_node((4, 1))
    n5 = builder.new_node((5, 2))
    # More complex topology than previous examples: two branch points.
    for src, dst in ((n0, n1), (n1, n5), (n0, n2), (n2, n3),
                     (n3, n4), (n3, n5), (n4, n5)):
        builder.new_arc(src, dst)
    graph = FrozenGraph(builder)

    # Two order-3 forward Hmms (self loop, forward 1, forward 2); the
    # middle model is special and can skip.
    seed(0)
    hmm0 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    skip_hmm = Hmm(1)
    trans = array(((0.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.5, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5),
                   (0.0, 0.0, 0.0, 0.5, 0.35, 0.1, 0.05),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)))
    skip_hmm.build_model(gmm_mgr, (0,), 3, 3, trans)
    hmm2 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    hmm_mgr = HmmMgr((hmm0, skip_hmm, hmm2))

    # Per-input split probabilities (3-tuples: one entry per HMM input)
    # for the out-arcs of the two branching nodes.
    split_probs = {(0, 1): (0.4, 0.3, 0.8),
                   (0, 2): (0.6, 0.7, 0.2),
                   (3, 4): (0.4, 0.3, 0.8),
                   (3, 5): (0.6, 0.7, 0.2)}
    tg0 = TrainingGraph(graph, hmm_mgr, split_prob_dict=split_probs)

    with DebugPrint("bwt_ctsh") if True else DebugPrint():
        standalone = tg0.convert_to_standalone_hmm()
    out += "\n\n========= TG CONVERTED TO Hmm =========\n\n" + standalone.to_string(full=True)
    return out