def find_most_violated_constraint_margin(F, y, model, sparm): """Return ybar associated with x's most violated constraint. The find most violated constraint function for margin rescaling. The default behavior is that this returns the value from the general find_most_violated_constraint function.""" if len(y) != 2: raise Exception('y should be a pair (states,orients)') data_weights, T = diagonal.unpack_weights(list(model.w)) states, orients = y A = path.compute_loss_augmented_terms(F, data_weights, states, path.L2) ybar = diagonal.solve(A, T) if len(ybar) != 2: raise Exception('ybar should be a pair (states,orients)') print '\nFinding most violated constraint' print ' w: ', list(model.w) print ' data w: ', data_weights print ' transition:\n', T print ' true y: ', y print ' classified ybar: ', ybar print ' feature(true y): ', path.compute_path_features(F, y) print ' feature(ybar): ', path.compute_path_features(F, ybar) print ' loss: ', path.compute_loss(y[0], ybar[0], path.L2) return ybar
def find_most_violated_constraint_margin(problem, gt, model, sparm): """Return ybar associated with x's most violated constraint. The find most violated constraint function for margin rescaling. The default behavior is that this returns the value from the general find_most_violated_constraint function.""" assert(isinstance(problem, manhattan_utils.ManhattanProblem)) assert(isinstance(gt, manhattan_utils.ManhattanSolution)) data_weights,T = diagonal.unpack_weights(list(model.w)) data_terms = path.compute_data_terms(problem.F, data_weights) loss_terms = gt.compute_loss_terms(LossFunc) A = data_terms + loss_terms est_states,est_orients = diagonal.solve(A, T) hyp = manhattan_utils.ManhattanSolution(problem, est_states, est_orients) print '\nFinding most violated constraint' print ' w: ',list(model.w) print ' data w: ',data_weights print ' transition:\n',T print ' true y: ',gt.ys print ' classified ybar: ',hyp.ys print ' feature(true y): ',path.compute_path_features(problem.F, gt.pair) print ' feature(ybar): ',path.compute_path_features(problem.F, hyp.pair) print ' loss: ',gt.compute_loss(hyp, LossFunc) return hyp
def find_most_violated_constraint_margin(problem, gt, model, sparm): """Return ybar associated with x's most violated constraint. The find most violated constraint function for margin rescaling. The default behavior is that this returns the value from the general find_most_violated_constraint function.""" assert (isinstance(problem, manhattan_utils.ManhattanProblem)) assert (isinstance(gt, manhattan_utils.ManhattanSolution)) data_weights, T = diagonal.unpack_weights(list(model.w)) data_terms = path.compute_data_terms(problem.F, data_weights) loss_terms = gt.compute_loss_terms(LossFunc) A = data_terms + loss_terms est_states, est_orients = diagonal.solve(A, T) hyp = manhattan_utils.ManhattanSolution(problem, est_states, est_orients) print '\nFinding most violated constraint' print ' w: ', list(model.w) print ' data w: ', data_weights print ' transition:\n', T print ' true y: ', gt.ys print ' classified ybar: ', hyp.ys print ' feature(true y): ', path.compute_path_features(problem.F, gt.pair) print ' feature(ybar): ', path.compute_path_features(problem.F, hyp.pair) print ' loss: ', gt.compute_loss(hyp, LossFunc) return hyp
def find_most_violated_constraint_margin(F, y, model, sparm): """Return ybar associated with x's most violated constraint. The find most violated constraint function for margin rescaling. The default behavior is that this returns the value from the general find_most_violated_constraint function.""" if len(y) != 2: raise Exception('y should be a pair (states,orients)') data_weights,T = diagonal.unpack_weights(list(model.w)) states,orients = y A = path.compute_loss_augmented_terms(F, data_weights, states, path.L2) ybar = diagonal.solve(A,T) if len(ybar) != 2: raise Exception('ybar should be a pair (states,orients)') print '\nFinding most violated constraint' print ' w: ',list(model.w) print ' data w: ',data_weights print ' transition:\n',T print ' true y: ',y print ' classified ybar: ',ybar print ' feature(true y): ',path.compute_path_features(F,y) print ' feature(ybar): ',path.compute_path_features(F,ybar) print ' loss: ',path.compute_loss(y[0], ybar[0], path.L2) return ybar
def psi(problem, hyp, model, sparm):
    """Return a feature vector representing pattern x and label y.

    Combined feature function for the structured SVM: computes the path
    features of the hypothesis over the problem's feature field and
    wraps them in a svmapi.Sparse vector."""
    assert(isinstance(problem, manhattan_utils.ManhattanProblem))
    assert(isinstance(hyp, manhattan_utils.ManhattanSolution))
    features = path.compute_path_features(problem.F, hyp.pair)
    return svmapi.Sparse(features)
def psi(problem, hyp, model, sparm):
    """Joint feature map psi(x, y) for the structured SVM.

    Returns a svmapi.Sparse vector built from the hypothesis's path
    features over the problem's feature field."""
    # Sanity-check argument types before touching their attributes.
    assert (isinstance(problem, manhattan_utils.ManhattanProblem))
    assert (isinstance(hyp, manhattan_utils.ManhattanSolution))
    joint_ftr = path.compute_path_features(problem.F, hyp.pair)
    return svmapi.Sparse(joint_ftr)
def psi(F, y, model, sparm):
    """Return a feature vector representing pattern x and label y.

    Combined feature function: computes path features of the label pair
    y over the feature field F and returns them as a svmapi.Sparse
    object."""
    if len(y) != 2:
        raise Exception('y should be a pair (states,orients)')
    ftr = path.compute_path_features(F, y)
    return svmapi.Sparse(ftr)
def init_model(sample, model, sparm):
    """Initializes the learning model.

    Sets model.size_psi (required by the framework) to the length of the
    joint feature vector, measured on the first training example. The
    length is not simply the number of data features: it also includes
    entries for the transition matrix. Returns nothing."""
    first_example = sample[0]
    gt_ftr = path.compute_path_features(first_example[0].F, first_example[1].pair)
    assert(np.ndim(gt_ftr) == 1)
    model.size_psi = len(gt_ftr)
def init_model(sample, model, sparm):
    """Initializes the learning model.

    Measures the joint feature vector on the first (problem, solution)
    training pair and records its length in model.size_psi. The vector
    includes transition-matrix terms in addition to the data-feature
    terms. Returns nothing."""
    ftr = path.compute_path_features(sample[0][0].F, sample[0][1].pair)
    # The feature map must be a flat vector for the solver.
    assert (np.ndim(ftr) == 1)
    model.size_psi = len(ftr)