def find_most_violated_constraint_margin(problem, gt, model, sparm): """Return ybar associated with x's most violated constraint. The find most violated constraint function for margin rescaling. The default behavior is that this returns the value from the general find_most_violated_constraint function.""" assert(isinstance(problem, manhattan_utils.ManhattanProblem)) assert(isinstance(gt, manhattan_utils.ManhattanSolution)) data_weights,T = diagonal.unpack_weights(list(model.w)) data_terms = path.compute_data_terms(problem.F, data_weights) loss_terms = gt.compute_loss_terms(LossFunc) A = data_terms + loss_terms est_states,est_orients = diagonal.solve(A, T) hyp = manhattan_utils.ManhattanSolution(problem, est_states, est_orients) print '\nFinding most violated constraint' print ' w: ',list(model.w) print ' data w: ',data_weights print ' transition:\n',T print ' true y: ',gt.ys print ' classified ybar: ',hyp.ys print ' feature(true y): ',path.compute_path_features(problem.F, gt.pair) print ' feature(ybar): ',path.compute_path_features(problem.F, hyp.pair) print ' loss: ',gt.compute_loss(hyp, LossFunc) return hyp
def find_most_violated_constraint_margin(problem, gt, model, sparm): """Return ybar associated with x's most violated constraint. The find most violated constraint function for margin rescaling. The default behavior is that this returns the value from the general find_most_violated_constraint function.""" assert (isinstance(problem, manhattan_utils.ManhattanProblem)) assert (isinstance(gt, manhattan_utils.ManhattanSolution)) data_weights, T = diagonal.unpack_weights(list(model.w)) data_terms = path.compute_data_terms(problem.F, data_weights) loss_terms = gt.compute_loss_terms(LossFunc) A = data_terms + loss_terms est_states, est_orients = diagonal.solve(A, T) hyp = manhattan_utils.ManhattanSolution(problem, est_states, est_orients) print '\nFinding most violated constraint' print ' w: ', list(model.w) print ' data w: ', data_weights print ' transition:\n', T print ' true y: ', gt.ys print ' classified ybar: ', hyp.ys print ' feature(true y): ', path.compute_path_features(problem.F, gt.pair) print ' feature(ybar): ', path.compute_path_features(problem.F, hyp.pair) print ' loss: ', gt.compute_loss(hyp, LossFunc) return hyp
def classify_example(problem, model, sparm):
    """Predict the labelling for a pattern under the current model.

    model.w is unpacked into data weights and a transition matrix, the
    feature map problem.F is scored, and the solver's output is wrapped
    in a ManhattanSolution.
    """
    weights, transitions = diagonal.unpack_weights(list(model.w))
    scores = path.compute_data_terms(problem.F, weights)
    best_path, best_orients = diagonal.solve(scores, transitions)
    return manhattan_utils.ManhattanSolution(problem, best_path, best_orients)
def find_most_violated_constraint_margin(F, y, model, sparm): """Return ybar associated with x's most violated constraint. The find most violated constraint function for margin rescaling. The default behavior is that this returns the value from the general find_most_violated_constraint function.""" if len(y) != 2: raise Exception('y should be a pair (states,orients)') data_weights, T = diagonal.unpack_weights(list(model.w)) states, orients = y A = path.compute_loss_augmented_terms(F, data_weights, states, path.L2) ybar = diagonal.solve(A, T) if len(ybar) != 2: raise Exception('ybar should be a pair (states,orients)') print '\nFinding most violated constraint' print ' w: ', list(model.w) print ' data w: ', data_weights print ' transition:\n', T print ' true y: ', y print ' classified ybar: ', ybar print ' feature(true y): ', path.compute_path_features(F, y) print ' feature(ybar): ', path.compute_path_features(F, ybar) print ' loss: ', path.compute_loss(y[0], ybar[0], path.L2) return ybar
def classify_example(problem, model, sparm):
    """Classify one pattern: return the predicted ManhattanSolution.

    The weight vector is split into per-feature data weights plus a
    transition model, and the dynamic-program solver is run over the
    resulting data terms.
    """
    w_data, w_trans = diagonal.unpack_weights(list(model.w))
    terms = path.compute_data_terms(problem.F, w_data)
    solved_states, solved_orients = diagonal.solve(terms, w_trans)
    return manhattan_utils.ManhattanSolution(problem,
                                             solved_states,
                                             solved_orients)
def print_learning_stats(sample, model, cset, alpha, sparm): """Print statistics once learning has finished. This is called after training primarily to compute and print any statistics regarding the learning (e.g., training error) of the model on the training sample. You may also use it to make final changes to model before it is written out to a file. For example, if you defined any non-pickle-able attributes in model, this is a good time to turn them into a pickle-able object before it is written out. Also passed in is the set of constraints cset as a sequence of (left-hand-side, right-hand-side) two-element tuples, and an alpha of the same length holding the Lagrange multipliers for each constraint. The default behavior is that nothing is printed.""" data_weights, T = diagonal.unpack_weights(list(model.w)) print 'Data model learned: ', data_weights print 'Transition model learned:\n', T for i, (F, gt) in enumerate(sample): y = classify_example(F, model, sparm) print 'Example ', i, ':' print ' Classified: ', list(y[0]) print ' True: ', list(gt[0]) print ' Loss:', loss(y, gt, sparm) print 'All Losses:', [ loss(y, classify_example(F, model, sparm), sparm) for F, y in sample ]
def find_most_violated_constraint_margin(F, y, model, sparm): """Return ybar associated with x's most violated constraint. The find most violated constraint function for margin rescaling. The default behavior is that this returns the value from the general find_most_violated_constraint function.""" if len(y) != 2: raise Exception('y should be a pair (states,orients)') data_weights,T = diagonal.unpack_weights(list(model.w)) states,orients = y A = path.compute_loss_augmented_terms(F, data_weights, states, path.L2) ybar = diagonal.solve(A,T) if len(ybar) != 2: raise Exception('ybar should be a pair (states,orients)') print '\nFinding most violated constraint' print ' w: ',list(model.w) print ' data w: ',data_weights print ' transition:\n',T print ' true y: ',y print ' classified ybar: ',ybar print ' feature(true y): ',path.compute_path_features(F,y) print ' feature(ybar): ',path.compute_path_features(F,ybar) print ' loss: ',path.compute_loss(y[0], ybar[0], path.L2) return ybar
def print_learning_stats(sample, model, cset, alpha, sparm): """Print statistics once learning has finished. This is called after training primarily to compute and print any statistics regarding the learning (e.g., training error) of the model on the training sample. You may also use it to make final changes to model before it is written out to a file. For example, if you defined any non-pickle-able attributes in model, this is a good time to turn them into a pickle-able object before it is written out. Also passed in is the set of constraints cset as a sequence of (left-hand-side, right-hand-side) two-element tuples, and an alpha of the same length holding the Lagrange multipliers for each constraint. The default behavior is that nothing is printed.""" data_weights,T = diagonal.unpack_weights(list(model.w)) print 'Data model learned: ',data_weights print 'Transition model learned:\n',T for i,(F,gt) in enumerate(sample): y = classify_example(F, model, sparm) print 'Example ',i,':' print ' Classified: ',list(y[0]) print ' True: ',list(gt[0]) print ' Loss:',loss(y, gt, sparm) print 'All Losses:', [loss(y, classify_example(F, model, sparm), sparm) for F,y in sample]
def print_learning_stats(sample, model, cset, alpha, sparm): """Print statistics once learning has finished. This is called after training primarily to compute and print any statistics regarding the learning (e.g., training error) of the model on the training sample. You may also use it to make final changes to model before it is written out to a file. For example, if you defined any non-pickle-able attributes in model, this is a good time to turn them into a pickle-able object before it is written out. Also passed in is the set of constraints cset as a sequence of (left-hand-side, right-hand-side) two-element tuples, and an alpha of the same length holding the Lagrange multipliers for each constraint. The default behavior is that nothing is printed.""" data_weights,T = diagonal.unpack_weights(list(model.w)) print 'Data model learned: ',data_weights print 'Transition model learned:\n',T pdf = PdfPages('training_results.pdf') losses = [0]*len(sample) for i,(problem,gt) in enumerate(sample): hyp = classify_example(problem, model, sparm) losses[i] = loss(gt, hyp, sparm) print 'Example ',i,':' print ' Classified: ',list(hyp.ys) print ' True: ',list(gt.ys) print ' Loss:',losses[i] #image_path = eg.image_path #image = plt.imread(image_path) if os.path.exists(image_path) else None #if (image is not None): # plt.cla() # plt.title('Example %d (%s:%d)' % (i,eg.sequence,eg.frame_id)) # plt.imshow(image) # plt.plot(gt[0], 'w') # plt.plot(y[0], 'g') # pdf.savefig() plt.cla() plt.title('Example %s:%d' % (problem.data.sequence, problem.data.frame_id)) plt.imshow(path.compute_data_terms(problem.F, data_weights)) plt.plot(gt.ys, 'w') plt.plot(hyp.ys, 'g') plt.xlim(0, np.size(problem.F,1)+1) plt.ylim(0, np.size(problem.F,0)+1) pdf.savefig() pdf.close() print 'All Losses: ',losses
def print_learning_stats(sample, model, cset, alpha, sparm):
    """Print statistics once learning has finished. This is called after training primarily to compute and print any statistics regarding the learning (e.g., training error) of the model on the training sample. You may also use it to make final changes to model before it is written out to a file. For example, if you defined any non-pickle-able attributes in model, this is a good time to turn them into a pickle-able object before it is written out. Also passed in is the set of constraints cset as a sequence of (left-hand-side, right-hand-side) two-element tuples, and an alpha of the same length holding the Lagrange multipliers for each constraint. The default behavior is that nothing is printed."""
    # Split the learned weight vector into per-feature data weights and a
    # transition matrix.
    data_weights, T = diagonal.unpack_weights(list(model.w))
    print 'Data model learned: ', data_weights
    print 'Transition model learned:\n', T
    # One PDF page is produced per training example.
    # NOTE(review): pdf is not closed if an exception occurs below — the
    # file may be left unflushed; consider try/finally.
    pdf = PdfPages('training_results.pdf')
    losses = [0] * len(sample)
    for i, (problem, gt) in enumerate(sample):
        # Classify with the learned model and record the loss vs ground truth.
        hyp = classify_example(problem, model, sparm)
        losses[i] = loss(gt, hyp, sparm)
        print 'Example ', i, ':'
        print ' Classified: ', list(hyp.ys)
        print ' True: ', list(gt.ys)
        print ' Loss:', losses[i]
        # Earlier variant that drew over the camera image instead of the
        # data terms; kept for reference.
        #image_path = eg.image_path
        #image = plt.imread(image_path) if os.path.exists(image_path) else None
        #if (image is not None):
        #    plt.cla()
        #    plt.title('Example %d (%s:%d)' % (i,eg.sequence,eg.frame_id))
        #    plt.imshow(image)
        #    plt.plot(gt[0], 'w')
        #    plt.plot(y[0], 'g')
        #    pdf.savefig()
        # Page layout: data terms as image, true path in white, predicted
        # path in green; axes padded by one cell beyond the feature map.
        plt.cla()
        plt.title('Example %s:%d' % (problem.data.sequence, problem.data.frame_id))
        plt.imshow(path.compute_data_terms(problem.F, data_weights))
        plt.plot(gt.ys, 'w')
        plt.plot(hyp.ys, 'g')
        plt.xlim(0, np.size(problem.F, 1) + 1)
        plt.ylim(0, np.size(problem.F, 0) + 1)
        pdf.savefig()
    pdf.close()
    print 'All Losses: ', losses
def print_iteration_stats(ceps, cached_constraint, sample, model, cset, alpha, sparm): """Called just before the end of each cutting plane iteration. This is called just before the end of each cutting plane iteration, primarily to print statistics. The 'ceps' argument is how much the most violated constraint was violated by. The 'cached_constraint' argument is true if this constraint was constructed from the cache. The default behavior is that nothing is printed.""" data_weights,T = diagonal.unpack_weights(list(model.w)) print 'Current data model: ',data_weights print 'Current transition model:\n',T
def print_iteration_stats(ceps, cached_constraint, sample, model, cset, alpha, sparm): """Called just before the end of each cutting plane iteration. This is called just before the end of each cutting plane iteration, primarily to print statistics. The 'ceps' argument is how much the most violated constraint was violated by. The 'cached_constraint' argument is true if this constraint was constructed from the cache. The default behavior is that nothing is printed.""" data_weights, T = diagonal.unpack_weights(list(model.w)) print 'Current data model: ', data_weights print 'Current transition model:\n', T
def classify_example(F, model, sparm):
    """Return the predicted label for feature map F under the model.

    The weight vector is decoded into data weights and a transition
    matrix, and the solver is run directly on the resulting data terms.
    """
    w_data, w_trans = diagonal.unpack_weights(list(model.w))
    return diagonal.solve(path.compute_data_terms(F, w_data), w_trans)
def classify_example(F, model, sparm):
    """Classify pattern F: unpack the weights, score F, and solve.

    Returns whatever diagonal.solve yields for the data terms and
    transition model decoded from model.w.
    """
    weights, transitions = diagonal.unpack_weights(list(model.w))
    terms = path.compute_data_terms(F, weights)
    prediction = diagonal.solve(terms, transitions)
    return prediction