Example #1
def _test11():
    # A reduced version of test10
    ret = ""
    # GmmMgr setup

    num_states = 2
    dimension = 2
    models = []
    for i in xrange(num_states):
        dm = DummyModel(dimension, 1.0)
        models.append(dm)

    gmm_mgr = GmmMgr(models)

    gb = GraphBuilder()
    node_id0 = gb.new_node((0, 0))
    node_id1 = gb.new_node((1, 1))
    node_id2 = gb.new_node((2, 1))
    node_id3 = gb.new_node((3, 1))
    node_id4 = gb.new_node((4, 2))

    # The topology here is slightly more complex than in the previous example
    arc_id = gb.new_arc(node_id0, node_id1)
    arc_id = gb.new_arc(node_id1, node_id4)
    arc_id = gb.new_arc(node_id0, node_id2)
    arc_id = gb.new_arc(node_id2, node_id3)
    arc_id = gb.new_arc(node_id3, node_id4)
    arc_id = gb.new_arc(node_id2, node_id4)
    gr0 = FrozenGraph(gb)

    # Make two Hmms with 2 states and order 2 (self loop, forward 1).
    # The model in the middle is special and can skip.
    seed(0)
    hmm0 = make_forward_hmm(gmm_mgr, num_states, order=2, exact=False)
    hmm1 = Hmm(1)
    trans = array(((0.0, 0.5, 0.5), (0.0, 0.5, 0.5), (0.0, 0.0, 0.0)))
    hmm1.build_model(gmm_mgr, (0, ), 1, 1, trans)
    hmm2 = make_forward_hmm(gmm_mgr, num_states, order=2, exact=True)
    hmm_mgr = HmmMgr((hmm0, hmm1, hmm2))

    spd = {}
    spd[(0, 1)] = (0.4, )
    spd[(0, 2)] = (0.6, )

    spd[(2, 3)] = (0.4, )
    spd[(2, 4)] = (0.6, )

    tg0 = TrainingGraph(gr0, hmm_mgr, split_prob_dict=spd)

    if do_display:
        tg0.dot_display()
        tg0.dot_display(expand_hmms=True)

    with DebugPrint("bwt_ctsh") if True else DebugPrint():
        result_hmm = tg0.convert_to_standalone_hmm()
    ret += "\n\n========= TG CONVERTED TO Hmm =========\n\n" + result_hmm.to_string(
        full=True)

    return ret
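
The split_prob_dict above assigns a probability tuple to each arc leaving the two branch nodes (node 0 fans out to nodes 1 and 2, node 2 fans out to nodes 3 and 4). Below is a minimal sketch of the property the example data satisfies; the helper name check_split_probs is hypothetical and not part of the toolkit:

from collections import defaultdict

def check_split_probs(spd):
    # Group the probability tuples by source node and verify that, component-wise,
    # the tuples on arcs leaving the same node sum to 1.0.
    by_source = defaultdict(list)
    for (src, dst), probs in spd.items():
        by_source[src].append(probs)
    for src, prob_tuples in by_source.items():
        for component in zip(*prob_tuples):
            assert abs(sum(component) - 1.0) < 1e-9, (src, component)

check_split_probs({(0, 1): (0.4,), (0, 2): (0.6,), (2, 3): (0.4,), (2, 4): (0.6,)})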
Example #2
def read_hmm(stream, user_data, header_tokens):
    # Read one Hmm from the stream: the input/state/output counts, the full
    # (s x s) transition matrix where s = num_inputs + num_states + num_outputs,
    # and the per-state model indices into the GmmMgr.
    gmm_mgr, log_domain = user_data
    v, num_inputs = stream.read_scalar("num_inputs", int)
    v, num_states = stream.read_scalar("num_states", int)
    v, num_outputs = stream.read_scalar("num_outputs", int)
    s = num_inputs + num_states + num_outputs
    v, trans_array = stream.read_array("transition_matrix", rtype=float, dim=2, shape=(s, s))
    v, models = stream.read_list("models", rtype=int, count=num_states)
    # Construct and return the Hmm object
    ret = Hmm(num_states, log_domain=log_domain)
    ret.build_model(gmm_mgr, models, num_inputs, num_outputs, trans_array)
    return ret
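
A minimal usage sketch under stated assumptions: the stand-in _ToyStream class, its hard-coded values, and the empty header_tokens are purely illustrative; the real reader is whatever serialization framework supplies read_scalar / read_array / read_list with the signatures used above.

from numpy import array

class _ToyStream(object):
    # Illustrative stand-in: each read_* call returns a (token, value) pair,
    # matching how read_hmm unpacks the results above.
    def read_scalar(self, name, rtype):
        return name, rtype(1)                      # 1 input, 1 state, 1 output
    def read_array(self, name, rtype=float, dim=2, shape=None):
        return name, array(((0.0, 0.5, 0.5), (0.0, 0.5, 0.5), (0.0, 0.0, 0.0)))
    def read_list(self, name, rtype=int, count=None):
        return name, (0,)                          # single model index

gmm_mgr = GmmMgr([DummyModel(2, 1.0)])             # as set up in the other examples
hmm = read_hmm(_ToyStream(), (gmm_mgr, False), header_tokens=())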
Example #3
def _test9():
    # Like test8, but now HMMs have multiple inputs and outputs.
    ret = ""
    # GmmMgr setup

    num_states = 3
    dimension = 2
    models = []
    for i in xrange(num_states):
        dm = DummyModel(dimension, 1.0)
        models.append(dm)

    gmm_mgr = GmmMgr(models)

    gb = GraphBuilder()
    node_id0 = gb.new_node((0, 0))
    node_id1 = gb.new_node((1, 1))
    node_id2 = gb.new_node((2, 1))
    node_id3 = gb.new_node((3, 1))
    node_id4 = gb.new_node((4, 1))
    node_id5 = gb.new_node((5, 2))
    arc_id = gb.new_arc(node_id0, node_id1)
    arc_id = gb.new_arc(node_id1, node_id2)
    arc_id = gb.new_arc(node_id2, node_id3)
    arc_id = gb.new_arc(node_id3, node_id4)
    arc_id = gb.new_arc(node_id4, node_id5)
    gr0 = FrozenGraph(gb)

    # Make two Hmms with 3 states and order 3 (self loop, forward 1, forward 2).
    # The model in the middle is special and can skip directly from its inputs
    # to its outputs.
    seed(0)
    hmm0 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    hmm1 = Hmm(1)
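    # hmm1's transition matrix is 7x7: 3 inputs + 1 real state + 3 outputs
    # (presumably ordered inputs, then states, then outputs, matching
    # s = num_inputs + num_states + num_outputs in read_hmm above); each input
    # row can jump straight to an output column, giving the skip behavior.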
    trans = array(((0.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.5, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5),
                   (0.0, 0.0, 0.0, 0.5, 0.35, 0.1, 0.05),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)))
    hmm1.build_model(gmm_mgr, (0, ), 3, 3, trans)
    hmm2 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    hmm_mgr = HmmMgr((hmm0, hmm1, hmm2))

    with DebugPrint("bwt_vrfy") if False else DebugPrint():
        tg0 = TrainingGraph(gr0, hmm_mgr, split_prob_dict=dict())

    result_hmm = tg0.convert_to_standalone_hmm()
    ret += "\n\n========= TG CONVERTED TO Hmm =========\n\n" + result_hmm.to_string(
        full=True)

    return ret
Example #4
def make_forward_hmm(gmm_mgr, num_states, order, models=None, exact=False):
    hmm0 = Hmm(num_states)
    # if no model indices were given, draw a random model index from the GmmMgr
    # for each state
    models = tuple(randint(0, gmm_mgr.num_models - 1)
                   for i in xrange(num_states)) if models is None else models
    trans = tuple([p] * num_states for p in toy_probs(order))
    if exact:
        hmm0.build_forward_model_exact(gmm_mgr, models, order, trans)
    else:
        hmm0.build_forward_model_compact(gmm_mgr, models, order, trans)

    return hmm0
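
For reference, the tests above call this helper like so (a minimal sketch: gmm_mgr is a GmmMgr populated as in the examples, and seed(0) keeps the random model indices reproducible):

seed(0)
hmm_a = make_forward_hmm(gmm_mgr, num_states=3, order=3, exact=True)  # as in _test9 / _test10
hmm_b = make_forward_hmm(gmm_mgr, 2, 2, exact=False)                  # as in test5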
Example #5
    def add_epsilon_model(self, gmm_mgr, arity, log_domain=False):
        """
        Add an epsilon model with the given arity if one doesn't already exist.

        arity is an int with value > 0.  Returns the index of the epsilon model
        for that arity, whether it was just added or already existed.  This call
        can only be made when the adaptation state is NOT_ADAPTING.

        >>> hmm_mgr0 = HmmMgr(12)
        >>> hmm_mgr0.add_epsilon_model(None, 1)
        0
        
        >>> hmm_mgr0.add_epsilon_model(None, 2)
        1
        
        >>> hmm_mgr0.add_epsilon_model(None, 4)
        2

        >>> print hmm_mgr0
        HmmMgr with 3 models

        >>> hmm_mgr0.input_arity_set
        set([1, 2, 4])

        >>> hmm_mgr0.output_arity_set
        set([1, 2, 4])

        >>> idx = hmm_mgr0.get_epsilon_model_index(2)

        # >>> hmm_mgr0[idx].dot_display()
        
        """
        self._require_state("NOT_ADAPTING")
        if arity <= 0:
            raise ValueError("Expected arity to be > 0, but got %d" % (arity,))

        if self.has_epsilon_model(arity):
            return self.get_epsilon_model_index(arity)
        
        epsilon_model = Hmm(0, log_domain)
        order = arity + 1
        epsilon_model.build_forward_model_compact(gmm_mgr, (), order, repeat((), order))

        temp_models = list(self._models)
        temp_models.append(epsilon_model)
        self._models = tuple(temp_models)
        new_index = len(self._models) - 1 
        self._epsilon_model_map[arity] = new_index
        return new_index
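
Based on the has_epsilon_model early return above, requesting the same arity twice should hand back the existing index rather than appending a second model. A small hedged sketch (the variable names are illustrative; gmm_mgr may be None for epsilon models, as in the doctest):

hmm_mgr1 = HmmMgr(12)
first = hmm_mgr1.add_epsilon_model(None, 3)
again = hmm_mgr1.add_epsilon_model(None, 3)  # has_epsilon_model(3) is now True
assert first == again                        # existing index is reused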
Example #6
def test5(num_obs, do_display=False):
    # A test in which one of the HMMs has a transition from an input directly to
    # an output, so it can behave as an epsilon.  This node follows another node
    # in a linear arrangement (a third node exists below but is commented out).

    # Data generator setup and data generation
    dimension = 2
    obs_gen = make_data_generator(dimension)
    obs_list = [obs_gen.next() for i in xrange(num_obs)]

    # GmmMgr setup
    num_models = 20
    models = make_standard_gmms(dimension, num_models)
    gmm_mgr1 = GmmMgr(models[0:10])
    gmm_mgr2 = GmmMgr(models[10:20])

    # Hmm setup
    # Make two Hmms with 2 states and order 2 (self loop, forward 1).  The model
    # in the middle is special in that it can skip directly from the input state
    # to the output state.
    seed(0)
    num_states = 2
    hmm0 = make_forward_hmm(gmm_mgr1, num_states, 2, exact=False)
    hmm1 = Hmm(1)
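    # 3x3 transition matrix (presumably ordered input, state, output): the 0.5 in
    # the input row's output column is the direct input-to-output transition that
    # lets this HMM act as an epsilon.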
    trans = array(((0.0, 0.5, 0.5), (0.0, 0.5, 0.5), (0.0, 0.0, 0.0)))
    hmm1.build_model(gmm_mgr1, (0, ), 1, 1, trans)
    hmm2 = make_forward_hmm(gmm_mgr1, num_states, 2, exact=False)
    hmm_mgr = HmmMgr((hmm0, hmm1, hmm2))

    # TrainingGraph setup
    gb = GraphBuilder()
    node_id0 = gb.new_node((0, 0))
    node_id1 = gb.new_node((1, 1))
    # node_id2 = gb.new_node((2,2))
    arc_id = gb.new_arc(node_id0, node_id1)
    # arc_id = gb.new_arc(node_id1, node_id2)
    gr0 = FrozenGraph(gb)
    tg0 = TrainingGraph(gr0, hmm_mgr, split_prob_dict=dict())

    if do_display:
        tg0.dot_display()
        tg0.dot_display(expand_hmms=True)

    valid, ret = validate_training_graph(tg0, gmm_mgr1, hmm_mgr, obs_list, 1,
                                         gmm_mgr2)
    return ret
Example #7
def _test10():
    # Like test9, but now HMMs are arranged in a diamond pattern so inter-HMM
    # probabilities come into play
    ret = ""
    # GmmMgr setup

    num_states = 3
    dimension = 2
    models = []
    for i in xrange(num_states):
        dm = DummyModel(dimension, 1.0)
        models.append(dm)

    gmm_mgr = GmmMgr(models)

    gb = GraphBuilder()
    node_id0 = gb.new_node((0, 0))
    node_id1 = gb.new_node((1, 1))
    node_id2 = gb.new_node((2, 1))
    node_id3 = gb.new_node((3, 1))
    node_id4 = gb.new_node((4, 1))
    node_id5 = gb.new_node((5, 2))

    # The topology here is more complex than in the previous examples
    arc_id = gb.new_arc(node_id0, node_id1)
    arc_id = gb.new_arc(node_id1, node_id5)
    arc_id = gb.new_arc(node_id0, node_id2)
    arc_id = gb.new_arc(node_id2, node_id3)
    arc_id = gb.new_arc(node_id3, node_id4)
    arc_id = gb.new_arc(node_id3, node_id5)
    arc_id = gb.new_arc(node_id4, node_id5)
    gr0 = FrozenGraph(gb)

    # Make two Hmms with 3 states and order 3 (self loop, forward 1, forward 2).
    # The model in the middle is special and can skip.
    seed(0)
    hmm0 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    hmm1 = Hmm(1)
    trans = array(((0.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.5, 0.0),
                   (0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5),
                   (0.0, 0.0, 0.0, 0.5, 0.35, 0.1, 0.05),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
                   (0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)))
    hmm1.build_model(gmm_mgr, (0, ), 3, 3, trans)
    hmm2 = make_forward_hmm(gmm_mgr, num_states, order=3, exact=True)
    hmm_mgr = HmmMgr((hmm0, hmm1, hmm2))

    spd = {}
    spd[(0, 1)] = (0.4, 0.3, 0.8)
    spd[(0, 2)] = (0.6, 0.7, 0.2)

    spd[(3, 4)] = (0.4, 0.3, 0.8)
    spd[(3, 5)] = (0.6, 0.7, 0.2)

    tg0 = TrainingGraph(gr0, hmm_mgr, split_prob_dict=spd)

    with DebugPrint("bwt_ctsh") if True else DebugPrint():
        result_hmm = tg0.convert_to_standalone_hmm()
    ret += "\n\n========= TG CONVERTED TO Hmm =========\n\n" + result_hmm.to_string(
        full=True)

    return ret