def __init__(self, model, max_iter=1000, C=1.0, verbose=0, n_jobs=1,
             show_loss_every=0, logger=None, batch_mode=False,
             line_search=True, check_dual_every=10, tol=.001,
             do_averaging=True):
    """Initialize the Frank-Wolfe structured SVM learner.

    Parameters
    ----------
    model : object
        Structured model, forwarded to ``BaseSSVM``.
    max_iter : int, default=1000
        Maximum number of iterations, forwarded to ``BaseSSVM``.
    C : float, default=1.0
        Regularization parameter, forwarded to ``BaseSSVM``.
    verbose : int, default=0
        Verbosity level, forwarded to ``BaseSSVM``.
    n_jobs : int, default=1
        Must effectively be 1; any other value triggers a warning and is
        ignored (multiprocessing is not supported).
    show_loss_every : int, default=0
        Forwarded to ``BaseSSVM``.
    logger : object, default=None
        Forwarded to ``BaseSSVM``.
    batch_mode : bool, default=False
        Stored as an attribute; presumably selects batch vs. block-coordinate
        updates — confirm against the fitting code.
    line_search : bool, default=True
        Stored as an attribute.
    check_dual_every : int, default=10
        Stored as an attribute.
    tol : float, default=.001
        Stored as an attribute; presumably a duality-gap tolerance — confirm
        against the fitting code.
    do_averaging : bool, default=True
        Stored as an attribute.
    """
    if n_jobs != 1:
        # BUGFIX: the original raised ValueError with a message that says
        # "Ignoring n_jobs != 1" — the message and the behavior
        # contradicted each other.  The sibling FrankWolfeSSVM variant in
        # this file warns and continues, so do the same here.
        warnings.warn("FrankWolfeSSVM does not support multiprocessing"
                      " yet. Ignoring n_jobs != 1.")
    BaseSSVM.__init__(self, model, max_iter, C, verbose=verbose,
                      n_jobs=n_jobs, show_loss_every=show_loss_every,
                      logger=logger)
    self.tol = tol
    self.batch_mode = batch_mode
    self.line_search = line_search
    self.check_dual_every = check_dual_every
    self.do_averaging = do_averaging
def __init__(self, model, max_iter=1000, C=1.0, verbose=0, n_jobs=1,
             show_loss_every=0, logger=None, batch_mode=False,
             line_search=True, check_dual_every=10, tol=.001,
             do_averaging=True, sample_method='perm', random_state=None,
             X_test=None, Y_test=None):
    """Initialize the Frank-Wolfe structured SVM learner.

    ``model``, ``max_iter``, ``C``, ``verbose``, ``n_jobs``,
    ``show_loss_every`` and ``logger`` are forwarded to ``BaseSSVM``;
    everything else is stored on the instance.  ``n_jobs != 1`` only
    produces a warning (multiprocessing is unsupported), and
    ``sample_method`` must be one of ``'perm'``, ``'rnd'`` or ``'seq'``.

    Raises
    ------
    ValueError
        If ``sample_method`` is not a recognized value.
    """
    # Warn (do not fail) on unsupported parallelism; warning precedes the
    # sample_method validation so both can fire in the original order.
    if n_jobs != 1:
        warnings.warn("FrankWolfeSSVM does not support multiprocessing"
                      " yet. Ignoring n_jobs != 1.")
    if sample_method not in ('perm', 'rnd', 'seq'):
        raise ValueError("sample_method can only be perm, rnd, or seq")

    BaseSSVM.__init__(self, model, max_iter, C, verbose=verbose,
                      n_jobs=n_jobs, show_loss_every=show_loss_every,
                      logger=logger)

    # Optimization knobs.
    self.tol = tol
    self.batch_mode = batch_mode
    self.line_search = line_search
    self.check_dual_every = check_dual_every
    self.do_averaging = do_averaging
    self.sample_method = sample_method
    self.random_state = random_state
    # Optional held-out data and a log of oracle errors, kept verbatim.
    self.X_test = X_test
    self.Y_test = Y_test
    self.oracle_errs = []
def __init__(self, model, max_iter=10000, C=1.0, check_constraints=False,
             verbose=0, negativity_constraint=None, n_jobs=1,
             break_on_bad=False, show_loss_every=0, tol=1e-3,
             inference_cache=0, inactive_threshold=1e-5,
             inactive_window=50, logger=None, cache_tol='auto',
             switch_to=None):
    """Initialize the one-slack cutting-plane SSVM learner.

    ``model``, ``max_iter``, ``C``, ``verbose``, ``n_jobs``,
    ``show_loss_every`` and ``logger`` go to ``BaseSSVM``; the remaining
    arguments are stored unchanged as attributes.  Timing and iteration
    counters start at zero.
    """
    BaseSSVM.__init__(self, model, max_iter, C, verbose=verbose,
                      n_jobs=n_jobs, show_loss_every=show_loss_every,
                      logger=logger)

    # Constraint-handling options.
    self.negativity_constraint = negativity_constraint
    self.check_constraints = check_constraints
    self.break_on_bad = break_on_bad

    # Tolerances and caching behavior.
    self.tol = tol
    self.cache_tol = cache_tol
    self.inference_cache = inference_cache
    self.inactive_threshold = inactive_threshold
    self.inactive_window = inactive_window
    self.switch_to = switch_to

    # Bookkeeping counters, all reset to zero on construction.
    self.qp_time = 0
    self.inference_time = 0
    self.inference_calls = 0
    self.iterations_done = 0
def __init__(self, model, max_iter=100, C=1.0, verbose=0, momentum=0.0,
             learning_rate='auto', n_jobs=1, show_loss_every=0,
             decay_exponent=1, break_on_no_constraints=True, logger=None,
             batch_size=None, decay_t0=10, averaging=None, shuffle=False,
             check_every=1):
    """Initialize the subgradient-style SSVM learner.

    Forwards ``model``, ``max_iter``, ``C``, ``verbose``, ``n_jobs``,
    ``show_loss_every`` and ``logger`` to ``BaseSSVM`` and stores every
    other argument as an attribute.  The step counter ``t`` starts at 0.
    """
    BaseSSVM.__init__(self, model, max_iter, C, verbose=verbose,
                      n_jobs=n_jobs, show_loss_every=show_loss_every,
                      logger=logger)

    # Update-rule configuration.
    self.averaging = averaging
    self.break_on_no_constraints = break_on_no_constraints
    self.momentum = momentum
    self.learning_rate = learning_rate
    self.decay_exponent = decay_exponent
    self.decay_t0 = decay_t0

    # Batching / traversal options.
    self.batch_size = batch_size
    self.shuffle = shuffle
    self.check_every = check_every

    # Iteration counter, starts at zero.
    self.t = 0
    # NOTE(review): alpha is hard-coded to 0.1 rather than exposed as a
    # parameter — presumably an internal step-size constant; confirm
    # against the fitting code before changing.
    self.alpha = 0.1
def __init__(self, model, max_iter=100, C=1.0, check_constraints=True,
             verbose=0, negativity_constraint=None, n_jobs=1,
             break_on_bad=False, show_loss_every=0, batch_size=10,
             tol=1e-3, inactive_threshold=1e-5, inactive_window=50,
             logger=None, switch_to=None):
    """Initialize the n-slack cutting-plane SSVM learner.

    ``model``, ``max_iter``, ``C``, ``verbose``, ``n_jobs``,
    ``show_loss_every`` and ``logger`` are delegated to ``BaseSSVM``;
    all remaining arguments are stored as attributes, and the weight
    vector ``w`` is initialized to ones.
    """
    BaseSSVM.__init__(self, model, max_iter, C, verbose=verbose,
                      n_jobs=n_jobs, show_loss_every=show_loss_every,
                      logger=logger)

    # Constraint-handling options.
    self.negativity_constraint = negativity_constraint
    self.check_constraints = check_constraints
    self.break_on_bad = break_on_bad

    # Cutting-plane bookkeeping parameters.
    self.batch_size = batch_size
    self.tol = tol
    self.inactive_threshold = inactive_threshold
    self.inactive_window = inactive_window
    self.switch_to = switch_to

    # NOTE(review): the weight vector is hard-coded to 6 dimensions —
    # presumably tied to a specific model's joint-feature size; verify
    # against the model before reusing elsewhere.
    self.w = np.ones(6)