def __init__(self, N, D, m, sigma_p, num_steps, step_size, max_steps=None,
             sigma0=0.5, lmbda0=0.0001, learn_parameters=False):
    """Set up a trajectory job and pick cluster resources from problem size.

    Resource tiers: small problems (N <= 2000 or D < 10) get 1h/2 units,
    medium (N <= 5000 or D < 50) get 24h/3 units, everything else 72h/8 units.
    NOTE(review): units of `memory` are not visible here — confirm with the
    scheduler configuration.
    """
    IndependentJob.__init__(self, TrajectoryJobResultAggregator())

    # Choose walltime (seconds) and memory tier by problem size.
    if N <= 2000 or D < 10:
        self.walltime, self.memory = 1 * 60 * 60, 2
    elif N <= 5000 or D < 50:
        self.walltime, self.memory = 24 * 60 * 60, 3
    else:
        self.walltime, self.memory = 72 * 60 * 60, 8

    self.N = N
    self.D = D
    self.m = m
    self.sigma0 = sigma0
    self.lmbda0 = lmbda0
    self.sigma_p = sigma_p
    self.num_steps = num_steps
    self.step_size = step_size
    self.max_steps = max_steps
    self.learn_parameters = learn_parameters
def __init__(self, aggregator, ozone_posterior, tau, kappa):
    """Store a private copy of the posterior plus the (tau, kappa) parameters."""
    IndependentJob.__init__(self, aggregator)
    # Re-construct rather than alias so each job owns its posterior instance.
    self.ozone_posterior = OzonePosterior(ozone_posterior.prior,
                                          ozone_posterior.logdet_method,
                                          ozone_posterior.solve_method)
    self.tau = tau
    self.kappa = kappa
def __init__(self, aggregator, mod_prm=None, data=None, name=None, save_dir=None):
    """Record model parameters, data, a display name, and an output directory."""
    IndependentJob.__init__(self, aggregator)
    self.mod_prm = mod_prm
    self.data = data
    self.name = name
    self.save_dir = save_dir
def __init__(self, aggregator, prob_label, rep, n, job_func):
    """Set up a job whose memory request scales with the training-set size.

    NOTE(review): `tr_proportion` is a module-level global — confirm it is
    defined before jobs are constructed.
    """
    # Just under 24 hours, in seconds.
    walltime = 60 * 59 * 24
    memory = int(tr_proportion * n * 1e-1) + 50
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.prob_label = prob_label
    self.rep = rep
    self.n = n
    self.job_func = job_func
def __init__(self, db_fname, result_name="result", seed=None, **param_dict):
    """Set up a scalar-result job bound to a result database file.

    If no seed is given, draw a random unsigned 32-bit seed and store it so
    the run can be reproduced later.
    """
    IndependentJob.__init__(self, ScalarResultAggregator())
    self.db_fname = db_fname
    self.param_dict = param_dict
    self.result_name = result_name
    self.seed = np.random.randint(2 ** 32 - 1) if seed is None else seed
def __init__(self, aggregator, sample_source, prob_label, rep, job_func):
    """Set up a test job; walltime depends on the estimated workload.

    NOTE(review): `sample_size` and `tr_proportion` are module-level
    globals — confirm they are set before jobs are constructed.
    """
    dim = sample_source.dim()
    # Heavy jobs (large dim * training size) get just under a day; others
    # just under an hour.
    is_heavy = dim * sample_size * tr_proportion / 15 >= 8000
    walltime = 60 * 59 * 24 if is_heavy else 60 * 59
    memory = int(tr_proportion * sample_size * 1e-2) + 50
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.sample_source = sample_source
    self.prob_label = prob_label
    self.rep = rep
    self.job_func = job_func
def __init__(self, aggregator, sample_source, prob_label, rep, job_func):
    """Set up a test job with a fixed just-under-a-day walltime.

    NOTE(review): `sample_size` and `tr_proportion` are module-level
    globals — confirm they are set before jobs are constructed.
    """
    # dim() was used by a previous size-dependent walltime formula; the call
    # is kept in case it has side effects on the source.
    dim = sample_source.dim()
    walltime = 60 * 59 * 24
    memory = int(tr_proportion * sample_size * 1e-2) + 50
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.sample_source = sample_source
    self.prob_label = prob_label
    self.rep = rep
    self.job_func = job_func
def __init__(self, aggregator, prob_label, rep, met_func, n):
    """Set up a job with a ~59-minute walltime and a fixed memory request."""
    # Just under an hour (a previous variant used just under a day).
    walltime = 60 * 59
    # Fixed request; the data-dependent formula int(n*1e-2) + 50 was retired.
    memory = 54272
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.prob_label = prob_label
    self.rep = rep
    self.met_func = met_func
    self.n = n
def __init__(self, aggregator, p, data_source, prob_label, rep, job_func, n):
    """Set up a goodness-of-fit job for an unnormalized density.

    NOTE(review): `tr_proportion` is a module-level global — confirm it is set.
    """
    # Just under an hour (a previous variant used just under a day).
    walltime = 60 * 59
    memory = int(tr_proportion * n * 1e-2) + 50
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    # p is an UnnormalizedDensity.
    self.p = p
    self.data_source = data_source
    self.prob_label = prob_label
    self.rep = rep
    self.job_func = job_func
    self.n = n
def __init__(self, aggregator, folder_path, prob_label, rep, job_func, n):
    """Set up a job reading data from a folder; memory scales with n.

    NOTE(review): `tr_proportion` is a module-level global — confirm it is set.
    """
    # Just under an hour (a previous variant used just under a day).
    walltime = 60 * 59
    memory = int(tr_proportion * n * 1e-2) + 100
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.folder_path = folder_path
    self.prob_label = prob_label
    self.rep = rep
    self.job_func = job_func
    self.n = n
def __init__(self, aggregator, folder_path, prob_label, rep, job_func):
    """Set up a job whose sample size n is parsed out of the problem label.

    NOTE(review): `tr_proportion` is a module-level global — confirm it is set.
    """
    # Just under a day, in seconds.
    walltime = 60 * 59 * 24
    parsed = exglo.parse_prob_label(prob_label)
    memory = int(tr_proportion * parsed['n'] * 1e-2) + 100
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.folder_path = folder_path
    self.prob_label = prob_label
    self.rep = rep
    self.job_func = job_func
def __init__(self, aggregator, paired_source, prob_label, rep, job_func,
             prob_param):
    """Set up an independence-test job over a paired data source.

    NOTE(review): `tr_proportion` and `sample_size` are module-level
    globals — confirm they are set before jobs are constructed.
    """
    # Just under an hour (a previous variant used just under a day).
    walltime = 60 * 59
    memory = int(tr_proportion * sample_size * 1e-2) + 50
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.paired_source = paired_source
    self.prob_label = prob_label
    self.rep = rep
    self.job_func = job_func
    self.prob_param = prob_param
def __init__(self, aggregator, sample_source, prob_label, rep, ni, n, job_func):
    """Set up a test job; memory scales with the training subset size.

    NOTE(review): `tr_proportion` is a module-level global — confirm it is set.
    """
    # dim() was used by a previous size-dependent walltime formula; the call
    # is kept in case it has side effects on the source.
    dim = sample_source.dim()
    n_train = int(n * tr_proportion)
    # Just under a day, in seconds.
    walltime = 60 * 59 * 24
    memory = int(n_train * 1e-2) + 50
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.sample_source = sample_source
    self.prob_label = prob_label
    self.rep = rep
    self.ni = ni
    self.n = n
    self.job_func = job_func
def __init__(self, db_fname, result_name="result", seed=None, **param_dict):
    """Set up a scalar-result job bound to a result database file.

    If no seed is given, draw a random unsigned 32-bit seed and store it so
    the run can be reproduced later.
    """
    IndependentJob.__init__(self, ScalarResultAggregator())
    self.db_fname = db_fname
    self.param_dict = param_dict
    self.result_name = result_name
    self.seed = np.random.randint(2 ** 32 - 1) if seed is None else seed
def __init__(self, aggregator, paired_source, prob_label, rep, job_func, n):
    """Set up an independence-test job; memory scales with n.

    NOTE(review): `tr_proportion` is a module-level global — confirm it is set.
    """
    # Just under an hour. Earlier variants used just under a day, or chose
    # per-size (60*59 if n < 100000 else 60*59*24).
    walltime = 60 * 59
    memory = int(tr_proportion * n * 1e-2) + 100
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.paired_source = paired_source
    self.prob_label = prob_label
    self.rep = rep
    self.job_func = job_func
    self.n = n
def __init__(self, N, D, lmbda, sigma_p, num_steps, step_size, max_steps=None):
    """Store trajectory-job parameters and attach a trajectory aggregator."""
    IndependentJob.__init__(self, TrajectoryJobResultAggregator())
    self.N = N
    self.D = D
    self.lmbda = lmbda
    self.sigma_p = sigma_p
    self.num_steps = num_steps
    self.step_size = step_size
    self.max_steps = max_steps
def __init__(self, aggregator, mix_ratios, data_loader, prob_label, rep,
             met_func, n):
    """Set up a mixture-comparison job; memory scales with n."""
    # Just under a day (an hour-long variant is retired).
    walltime = 60 * 59 * 24
    memory = int(n * 1e-2) + 50
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.mix_ratios = mix_ratios
    self.data_loader = data_loader
    self.prob_label = prob_label
    self.rep = rep
    self.met_func = met_func
    self.n = n
def __init__(self, aggregator, sample_source, prob_label, rep, ni, n, job_func):
    """Set up a test job; memory scales with the training subset size.

    NOTE(review): `tr_proportion` is a module-level global — confirm it is set.
    """
    # dim() was used by a previous size-dependent walltime formula; the call
    # is kept in case it has side effects on the source.
    dim = sample_source.dim()
    n_train = int(n * tr_proportion)
    # Just under a day, in seconds.
    walltime = 60 * 59 * 24
    memory = int(n_train * 1e-2) + 50
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    self.sample_source = sample_source
    self.prob_label = prob_label
    self.rep = rep
    self.ni = ni
    self.n = n
    self.job_func = job_func
def __init__(self, aggregator, P, Q, data_source, prob_label, rep, met_func, n):
    """Set up a model-comparison job between two candidate models P and Q."""
    # Just under an hour (a previous variant used just under a day).
    walltime = 60 * 59
    memory = int(n * 1e-2) + 50
    IndependentJob.__init__(self, aggregator, walltime=walltime, memory=memory)
    # P and Q are kmod.model.Model instances.
    self.P = P
    self.Q = Q
    self.data_source = data_source
    self.prob_label = prob_label
    self.rep = rep
    self.met_func = met_func
    self.n = n
def __init__(self, target, num_iterations, D, start, statistics=None,
             num_warmup=500, thin_step=1):
    """Set up an MCMC job on the short (1 hour) queue.

    Parameters
    ----------
    target :
        Target distribution object the sampler draws from.
    num_iterations : int
        Total number of MCMC iterations to run.
    D : int
        Dimension of the state space; must equal len(start).
    start : ndarray, shape (D,)
        Initial state; must be a 1-D array.
    statistics : dict, optional
        Named statistic callables to track. Defaults to an empty dict.
        (The original default was the mutable literal ``{}``, which is
        created once and shared by every instance using the default —
        replaced by a ``None`` sentinel so each instance gets its own dict.)
    num_warmup : int
        Number of warm-up iterations (default 500).
    thin_step : int
        Keep every ``thin_step``-th sample (default 1: no thinning).
    """
    IndependentJob.__init__(self, MCMCJobResultAggregator())

    self.target = target
    self.num_iterations = num_iterations
    self.D = D
    self.start = start
    # Fresh dict per instance — avoids the shared-mutable-default pitfall.
    self.statistics = {} if statistics is None else statistics
    self.num_warmup = num_warmup
    self.thin_step = thin_step

    # start must be a 1-D array of length D.
    assert len(start.shape) == 1
    assert len(start) == D

    # short queue
    self.walltime = 1 * 60 * 60

    # running average acceptance prob
    self.avg_accept = 0.
    self.recompute_log_pdf = False
def __init__(self, aggregator, sleep_time, walltime=1, memory=1, nodes=1):
    """A sleep job; resource arguments are forwarded positionally to the base."""
    IndependentJob.__init__(self, aggregator, walltime, memory, nodes)
    self.sleep_time = sleep_time
def __init__(self, aggregator):
    """Delegate directly to IndependentJob with the given result aggregator."""
    IndependentJob.__init__(self, aggregator)