Example 1
def find_most_violated_constraint_margin(problem, gt, model, sparm):
    """Return ybar associated with x's most violated constraint.

    The find-most-violated-constraint function for margin rescaling.
    The default behavior is that this returns the value from the
    general find_most_violated_constraint function."""

    assert isinstance(problem, manhattan_utils.ManhattanProblem)
    assert isinstance(gt, manhattan_utils.ManhattanSolution)

    data_weights, T = diagonal.unpack_weights(list(model.w))
    data_terms = path.compute_data_terms(problem.F, data_weights)
    loss_terms = gt.compute_loss_terms(LossFunc)
    # Margin rescaling: augment the data terms with the loss so that the decoder
    # returns the most violated constraint rather than the plain prediction.
    A = data_terms + loss_terms
    est_states, est_orients = diagonal.solve(A, T)
    hyp = manhattan_utils.ManhattanSolution(problem, est_states, est_orients)

    print '\nFinding most violated constraint'
    print '  w: ', list(model.w)
    print '  data w: ', data_weights
    print '  transition:\n', T
    print '  true y: ', gt.ys
    print '  classified ybar: ', hyp.ys
    print '  feature(true y): ', path.compute_path_features(problem.F, gt.pair)
    print '  feature(ybar): ', path.compute_path_features(problem.F, hyp.pair)
    print '  loss: ', gt.compute_loss(hyp, LossFunc)

    return hyp
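The key step above is building the loss-augmented score matrix A = data_terms + loss_terms before decoding. The sketch below illustrates just that augmentation, assuming the terms are NumPy score matrices of shape (num_states, num_cols) where higher is better; the Hamming-style loss and the per-column argmax are stand-ins for gt.compute_loss_terms(LossFunc) and diagonal.solve(A, T), which additionally enforces the transition model T.

import numpy as np

num_states, num_cols = 5, 8
data_terms = np.random.rand(num_states, num_cols)          # stand-in for path.compute_data_terms(...)
true_states = np.random.randint(0, num_states, num_cols)   # stand-in for the ground-truth labelling

# Hamming-style loss terms: zero wherever a hypothesis agrees with the truth.
loss_terms = np.ones((num_states, num_cols))
loss_terms[true_states, np.arange(num_cols)] = 0.0

A = data_terms + loss_terms              # loss-augmented scores, as in the function above
est_states = np.argmax(A, axis=0)        # simplified stand-in for diagonal.solve(A, T)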
Example 2
def classify_example(problem, model, sparm):
    """Given a pattern x, return the predicted label."""

    data_weights, T = diagonal.unpack_weights(list(model.w))
    A = path.compute_data_terms(problem.F, data_weights)
    hyp_path, hyp_orients = diagonal.solve(A, T)
    return manhattan_utils.ManhattanSolution(problem, hyp_path, hyp_orients)
Example 3
def classify_example(problem, model, sparm):
    """Given a pattern x, return the predicted label."""

    data_weights, T = diagonal.unpack_weights(list(model.w))
    A = path.compute_data_terms(problem.F, data_weights)
    hyp_path, hyp_orients = diagonal.solve(A, T)
    return manhattan_utils.ManhattanSolution(problem, hyp_path, hyp_orients)
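Examples 2 and 3 both rely on diagonal.unpack_weights to split the flat SVM-struct weight vector into per-feature data weights and a transition matrix T. The real packing convention lives in the diagonal module; the sketch below assumes a hypothetical layout (data weights first, then a flattened square transition matrix) purely to illustrate the split.

import numpy as np

def toy_unpack_weights(w, num_data=3, num_states=4):
    # Hypothetical layout: the first num_data entries are data weights, the
    # remaining num_states * num_states entries are a flattened transition matrix.
    w = np.asarray(w, dtype=float)
    data_weights = w[:num_data]
    T = w[num_data:num_data + num_states * num_states].reshape(num_states, num_states)
    return data_weights, T

w = list(np.arange(3 + 16, dtype=float))   # stand-in for list(model.w)
data_weights, T = toy_unpack_weights(w)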
Example 4
def find_most_violated_constraint_margin(problem, gt, model, sparm):
    """Return ybar associated with x's most violated constraint.

    The find-most-violated-constraint function for margin rescaling.
    The default behavior is that this returns the value from the
    general find_most_violated_constraint function."""

    assert isinstance(problem, manhattan_utils.ManhattanProblem)
    assert isinstance(gt, manhattan_utils.ManhattanSolution)

    data_weights, T = diagonal.unpack_weights(list(model.w))
    data_terms = path.compute_data_terms(problem.F, data_weights)
    loss_terms = gt.compute_loss_terms(LossFunc)
    A = data_terms + loss_terms
    est_states, est_orients = diagonal.solve(A, T)
    hyp = manhattan_utils.ManhattanSolution(problem, est_states, est_orients)

    print '\nFinding most violated constraint'
    print '  w: ', list(model.w)
    print '  data w: ', data_weights
    print '  transition:\n', T
    print '  true y: ', gt.ys
    print '  classified ybar: ', hyp.ys
    print '  feature(true y): ', path.compute_path_features(problem.F, gt.pair)
    print '  feature(ybar): ', path.compute_path_features(problem.F, hyp.pair)
    print '  loss: ', gt.compute_loss(hyp, LossFunc)

    return hyp
Example 5
def print_learning_stats(sample, model, cset, alpha, sparm):
    """Print statistics once learning has finished.
    
    This is called after training primarily to compute and print any
    statistics regarding the learning (e.g., training error) of the
    model on the training sample.  You may also use it to make final
    changes to model before it is written out to a file.  For example, if
    you defined any non-pickle-able attributes in model, this is a good
    time to turn them into a pickle-able object before it is written
    out.  Also passed in is the set of constraints cset as a sequence
    of (left-hand-side, right-hand-side) two-element tuples, and an
    alpha of the same length holding the Lagrange multipliers for each
    constraint.

    The default behavior is that nothing is printed."""

    data_weights, T = diagonal.unpack_weights(list(model.w))
    print 'Data model learned: ', data_weights
    print 'Transition model learned:\n', T

    pdf = PdfPages('training_results.pdf')

    losses = [0] * len(sample)
    for i, (problem, gt) in enumerate(sample):
        hyp = classify_example(problem, model, sparm)
        losses[i] = loss(gt, hyp, sparm)

        print 'Example ', i, ':'
        print '  Classified: ', list(hyp.ys)
        print '  True: ', list(gt.ys)
        print '  Loss:', losses[i]

        #image_path = eg.image_path
        #image = plt.imread(image_path) if os.path.exists(image_path) else None
        #if (image is not None):
        #    plt.cla()
        #    plt.title('Example %d (%s:%d)' % (i,eg.sequence,eg.frame_id))
        #    plt.imshow(image)
        #    plt.plot(gt[0], 'w')
        #    plt.plot(y[0], 'g')
        #    pdf.savefig()

        # Overlay the true (white) and classified (green) paths on the data-term image.
        plt.cla()
        plt.title('Example %s:%d' % (problem.data.sequence, 
                                     problem.data.frame_id))
        plt.imshow(path.compute_data_terms(problem.F, data_weights))
        plt.plot(gt.ys, 'w')
        plt.plot(hyp.ys, 'g')
        plt.xlim(0, np.size(problem.F, 1) + 1)
        plt.ylim(0, np.size(problem.F, 0) + 1)
        pdf.savefig()

    pdf.close()

    print 'All Losses: ', losses
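The plotting above follows matplotlib's multi-page PDF pattern: open a PdfPages object, call pdf.savefig() once per figure, and close it when done. A minimal standalone sketch of that pattern, with random arrays standing in for the data-term image and the two paths:

import numpy as np
import matplotlib
matplotlib.use('Agg')                                # render without a display
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages

pdf = PdfPages('sketch.pdf')                         # one page per example
for i in range(3):
    plt.cla()
    plt.title('Example %d' % i)
    plt.imshow(np.random.rand(10, 20))               # stand-in for the data-term image
    plt.plot(np.random.randint(0, 10, 20), 'w')      # stand-in for the true path
    plt.plot(np.random.randint(0, 10, 20), 'g')      # stand-in for the classified path
    pdf.savefig()                                    # append the current figure as a new page
pdf.close()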
Example 6
def print_learning_stats(sample, model, cset, alpha, sparm):
    """Print statistics once learning has finished.
    
    This is called after training primarily to compute and print any
    statistics regarding the learning (e.g., training error) of the
    model on the training sample.  You may also use it to make final
    changes to model before it is written out to a file.  For example, if
    you defined any non-pickle-able attributes in model, this is a good
    time to turn them into a pickle-able object before it is written
    out.  Also passed in is the set of constraints cset as a sequence
    of (left-hand-side, right-hand-side) two-element tuples, and an
    alpha of the same length holding the Lagrange multipliers for each
    constraint.

    The default behavior is that nothing is printed."""

    data_weights, T = diagonal.unpack_weights(list(model.w))
    print 'Data model learned: ', data_weights
    print 'Transition model learned:\n', T

    pdf = PdfPages('training_results.pdf')

    losses = [0] * len(sample)
    for i, (problem, gt) in enumerate(sample):
        hyp = classify_example(problem, model, sparm)
        losses[i] = loss(gt, hyp, sparm)

        print 'Example ', i, ':'
        print '  Classified: ', list(hyp.ys)
        print '  True: ', list(gt.ys)
        print '  Loss:', losses[i]

        #image_path = eg.image_path
        #image = plt.imread(image_path) if os.path.exists(image_path) else None
        #if (image is not None):
        #    plt.cla()
        #    plt.title('Example %d (%s:%d)' % (i,eg.sequence,eg.frame_id))
        #    plt.imshow(image)
        #    plt.plot(gt[0], 'w')
        #    plt.plot(y[0], 'g')
        #    pdf.savefig()

        plt.cla()
        plt.title('Example %s:%d' %
                  (problem.data.sequence, problem.data.frame_id))
        plt.imshow(path.compute_data_terms(problem.F, data_weights))
        plt.plot(gt.ys, 'w')
        plt.plot(hyp.ys, 'g')
        plt.xlim(0, np.size(problem.F, 1) + 1)
        plt.ylim(0, np.size(problem.F, 0) + 1)
        pdf.savefig()

    pdf.close()

    print 'All Losses: ', losses
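A natural follow-up to the per-example losses collected above is a single aggregate training error. The snippet below sketches that summary step, with placeholder values where the real code would use loss(gt, hyp, sparm).

import numpy as np

losses = [0.0, 2.5, 1.0]                             # placeholder values for loss(gt, hyp, sparm)
print('All Losses: %s' % losses)
print('Mean training loss: %.3f' % np.mean(losses))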
Example 7
def find_most_violated_constraint_margin(x, y, model, sparm):
    """Return ybar associated with x's most violated constraint.

    The find-most-violated-constraint function for margin rescaling.
    The default behavior is that this returns the value from the
    general find_most_violated_constraint function."""

    w = list(model.w)

    print '\nFinding most violated constraint'
    print '  w: ', w
    print '  y: ', y

    A = path.compute_loss_augmented_terms(x, w, y, path.L2)
    ybar = viterbi.solve(A)

    D = path.compute_data_terms(x, w)
    print '  ybar: ', ybar
    print '  loss: ', path.compute_loss(y, ybar, path.L2)
    #print 'Data terms:\n', np.round(D, 2)
    #print 'Loss augmented terms:\n', np.round(A, 2)

    return ybar
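This variant decodes with viterbi.solve(A) instead of diagonal.solve(A, T). Below is a minimal generic max-sum Viterbi over a score matrix, included only to show the shape of such a decoder; the linear jump penalty is an assumption, and the project's viterbi module presumably uses its own transition costs.

import numpy as np

def toy_viterbi(A, smoothness=1.0):
    # Max-sum decoding of a state sequence through the columns of score matrix A.
    num_states, num_cols = A.shape
    states = np.arange(num_states)
    score = np.empty_like(A, dtype=float)
    backptr = np.zeros(A.shape, dtype=int)
    score[:, 0] = A[:, 0]
    for t in range(1, num_cols):
        # Transition score: penalise large jumps between consecutive columns.
        trans = -smoothness * np.abs(states[:, None] - states[None, :])   # (prev, cur)
        cand = score[:, t - 1][:, None] + trans
        backptr[:, t] = np.argmax(cand, axis=0)
        score[:, t] = A[:, t] + np.max(cand, axis=0)
    # Backtrack the best state sequence from the final column.
    best = np.zeros(num_cols, dtype=int)
    best[-1] = int(np.argmax(score[:, -1]))
    for t in range(num_cols - 1, 0, -1):
        best[t - 1] = backptr[best[t], t]
    return best

ybar = toy_viterbi(np.random.rand(6, 10))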
Example 8
def classify_example(F, model, sparm):
    """Given a pattern x, return the predicted label."""

    data_weights, T = diagonal.unpack_weights(list(model.w))
    A = path.compute_data_terms(F, data_weights)
    return diagonal.solve(A, T)
Example 9
def classify_example(x, model, sparm):
    """Given a pattern x, return the predicted label."""

    w = list(model.w)
    return viterbi.solve(path.compute_data_terms(x, w))
Example 10
def classify_example(F, model, sparm):
    """Given a pattern x, return the predicted label."""

    data_weights, T = diagonal.unpack_weights(list(model.w))
    A = path.compute_data_terms(F, data_weights)
    return diagonal.solve(A, T)
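For completeness, path.compute_data_terms(F, data_weights) is what turns the feature volume F into the score matrix that the decoders consume. Its real definition lives in the path module; the sketch below assumes, purely for illustration, that F stacks one feature channel per data weight along its last axis (keeping the first two axes as image rows and columns, consistent with the np.size(problem.F, 0) and np.size(problem.F, 1) plot limits used earlier).

import numpy as np

def toy_data_terms(F, data_weights):
    # Hypothetical layout: F has shape (height, width, num_features) and the data
    # terms are a per-pixel weighted sum of the feature channels.
    return F.dot(np.asarray(data_weights, dtype=float))   # -> (height, width)

F = np.random.rand(50, 80, 3)                 # stand-in for problem.F
A = toy_data_terms(F, [0.5, 1.0, -0.2])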