        print('method', method)
        print('dataset_id', dataset_id)

        input_dir = 'cascade/{}/'.format(dataset_id)
        if method != 'our':
            output_dir = 'output/{}/{}/'.format(method, dataset_id)
            eval_result_path = 'eval/{}/{}.pkl'.format(method, dataset_id)
        else:
            output_dir = 'output/{}-{}/{}/'.format(method, root_sampler,
                                                   dataset_id)
            eval_result_path = 'eval/{}-{}/{}.pkl'.format(
                method, root_sampler, dataset_id)

        eval_dir = os.path.dirname(eval_result_path)
        print('eval_dir', eval_dir)
        makedir_if_not_there(eval_dir)
        makedir_if_not_there(output_dir)

        if parallel:
            print('parallel: ON')
            if method == 'min-steiner-tree':
                n_jobs = 4  # memory reason
            else:
                n_jobs = max_n_jobs
            print('n_jobs', n_jobs)
            rows = Parallel(n_jobs=n_jobs)(
                delayed(one_run)(g,
                                 edge_weights,
                                 input_path,
                                 output_dir,
                                 method,
                                 # the excerpt is cut off here; remaining arguments reconstructed from the analogous call below
                                 root_sampler=root_sampler,
                                 n_sample=n_sample)
                for input_path in tqdm(glob(input_dir + '*.pkl'))
                if not is_processed(input_path, output_dir))

        edge_weights = g.edge_properties['weights']

        dataset_id = "{}-m{}-s{}-o{}-omuniform".format(graph, cascade_model,
                                                       cascade_fraction,
                                                       obs_fraction)
        print('method', method)
        print('dataset_id', dataset_id)

        input_dir = 'cascade/{}/'.format(dataset_id)
        output_dir = 'output/{}-{}/{}/'.format(method, root_sampler_name,
                                               dataset_id)
        eval_result_path = 'eval/{}-{}/{}.pkl'.format(method,
                                                      root_sampler_name,
                                                      dataset_id)

        makedir_if_not_there(output_dir)
        makedir_if_not_there(os.path.dirname(eval_result_path))

        rows = Parallel(n_jobs=n_jobs)(
            delayed(one_run)(g,
                             edge_weights,
                             input_path,
                             output_dir,
                             method,
                             root_sampler_name=root_sampler_name,
                             n_sample=n_sample)
            for input_path in tqdm(glob(input_dir + '*.pkl'))
            if not is_processed(input_path, output_dir))

        # assert len(rows) > 0, 'nothing calculated'
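Both blocks above lean on two small project helpers that are not shown in the excerpt. A minimal sketch of what they might look like (the implementations below are assumptions, not the project's actual code):

import os


def makedir_if_not_there(d):
    # create the directory (and any parents) only if it does not exist yet
    os.makedirs(d, exist_ok=True)


def is_processed(input_path, output_dir):
    # treat a cascade as processed if a result file with the same basename
    # already exists in the output directory
    return os.path.exists(os.path.join(output_dir, os.path.basename(input_path)))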
Example #3
                mu_mo=br.mu_mo,
                k_days=int(br.k_days),
                x0_pt=br.x0_pt)

pkl.dump(params, open('output/params_after_lockdown.pkl', 'wb'))

total, delta, increase, trans_data, aux = do_simulation(total_days + 60,
                                                        bed_info,
                                                        params,
                                                        p0_time=start_date)

# df, I_true, start_date and end_date are defined earlier in the original
# script (this example is an excerpt)
I_true_all = df[(df['date'] > start_date)]['infected'].values
I_pred_all = increase[1:len(I_true_all) + 1, STATE.I]

I_pred = increase[1:(len(I_true) + 1), STATE.I]
dates = pd.date_range(start_date + timedelta(days=1),
                      end_date + timedelta(days=2))
data = {
    'date': dates,
    'true_I': I_true_all,
    'pred_I': I_pred_all,
    'abs_error': np.abs(I_true_all - I_pred_all),
    'squared_error': np.power(I_true_all - I_pred_all, 2),
    'used_in_fitting': [(d >= start_date) & (d <= end_date) for d in dates]
}

fit_df = pd.DataFrame.from_dict(data)

makedir_if_not_there('output/tbl/parameter_fitting')
fit_df.to_csv('output/tbl/parameter_fitting/daily-data.csv', index=False)
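The per-day error columns written above can be summarized directly from fit_df, for example restricted to the days actually used in fitting (a usage sketch, not part of the original script):

import numpy as np

fitted = fit_df[fit_df['used_in_fitting']]
print('MAE :', fitted['abs_error'].mean())
print('RMSE:', np.sqrt(fitted['squared_error'].mean()))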
Example #4
File: CIR.py  Project: tyqiu/Model-Design
        stages=[days_to_p0]
    )
    
    total, delta, increase, trans, stats = do_simulation(total_days, bed_info, params, p0_time=p0_time)
    bundle = [total, delta, increase, trans]
    return assumed_ld_date, delta_t, bundle, stats



# sweep every (post-lockdown parameter set, bed-capacity scenario) combination
file_path_factor = 'output/params_after_lockdown/'
file_list_factor = os.listdir(file_path_factor)
file_path_bed = 'data/bed_info/'
file_list_bed = os.listdir(file_path_bed)
for f_factor in file_list_factor:
    params_after = pkl.load(open(file_path_factor + f_factor, 'rb'))
    factor_one = f_factor.split(".pkl")[0].split("_")[3]  # factor value parsed from the file name
    for f_bed in file_list_bed:
        bed_info = pkl.load(open(file_path_bed + f_bed, 'rb'))
        bed_one = f_bed.split(".pkl")[0].split("_")[2]  # bed scenario parsed from the file name
        rows = Parallel(n_jobs=1)(
            delayed(one_run)(params_after, bed_info, delta_t, total_days)
            for delta_t in range(-7, 8))
        makedir_if_not_there('figs/advance-or-delay-lockdown/')
        for dt, days, bundle, stats in rows:
            dt_str = dt.strftime('%y-%m-%d')
            print(days)
            print(dt_str)
            fig, ax = plot_total(bundle[0], p0_time, total_days)
            fig.savefig(f'figs/advance-or-delay-lockdown/factor-{factor_one}-bed-{bed_one}-{dt_str}({days}).pdf')
            save_bundle(bundle, p0_time, total_days, f'output/tbl/advance-or-delay-lockdown/factor-{factor_one}-bed-{bed_one}-{dt_str}-({days})/')
            save_to_json(stats, f'output/tbl/advance-or-delay-lockdown/factor-{factor_one}-bed-{bed_one}-{dt_str}-({days})/stats.txt')
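save_to_json is another project helper not shown here; a plausible minimal version, assuming stats behaves like a plain dict (this is an assumption, not the project's code):

import json
import os


def save_to_json(obj, path):
    # create the parent directory if needed, then dump the object as JSON text
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'w') as f:
        json.dump(obj, f, indent=2, default=str)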

Example #5
params

# In[14]:

stats

# In[15]:

p0_time + timedelta(days=total_days)

# In[16]:

from helpers import plot_total
fig, ax = plot_total(total, p0_time, total_days)
fig.savefig('figs/start2end.pdf')

# In[2]:

makedir_if_not_there('output/tbl/start2end')

# In[17]:

save_bundle([total, delta, increase, trans_data], p0_time, total_days,
            'output/tbl/start2end')

# In[18]:

path = 'output/tbl/start2end/stats.txt'
save_to_json(stats, path)
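save_bundle is also project-specific. A rough sketch of one way it could work, assuming each element of the bundle is a per-day array that gets written to its own CSV (names and layout here are assumptions):

import os

import pandas as pd


def save_bundle(bundle, p0_time, total_days, out_dir):
    # write total, delta, increase and trans to separate CSV files,
    # one row per simulated day starting at p0_time
    os.makedirs(out_dir, exist_ok=True)
    for name, arr in zip(('total', 'delta', 'increase', 'trans'), bundle):
        dates = pd.date_range(p0_time, periods=len(arr))
        pd.DataFrame(arr, index=dates).to_csv(os.path.join(out_dir, name + '.csv'))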
Example #6

# t is the number of days back
rows = Parallel(n_jobs=1)(delayed(one_run)(t) for t in tqdm(range(20, 61)))

df = pd.DataFrame(rows,
                  columns=('t', 'actual_I', 'pred_I', 'mse_I', 'mse_E',
                           'mse_M', 'mse_IM', 'mse_IEM', 'r0_info'))

df.sort_values(by='mse_I').head(10)

best_t = int(df.sort_values(by='mse_IM').iloc[0].t)
p0_time = T('2020/3/20') - timedelta(days=best_t)
print(p0_time)

makedir_if_not_there('output/tbl/p0-time/')

df.to_csv('output/tbl/p0-time/error.csv', index=False)

pkl.dump(p0_time, open('output/p0_time.pkl', 'wb'))
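The pickled p0_time can later be read back by downstream scripts (a small usage sketch, assuming pkl is the standard pickle module):

import pickle as pkl

p0_time = pkl.load(open('output/p0_time.pkl', 'rb'))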

params = copy(params_jan27)
params.alpha = infection_factor * params.alpha
params.beta = infection_factor * params.beta
params.initial_num_E = 1
params.initial_num_I = 0
params.initial_num_M = 0

total, delta, increase, trans, stats = do_simulation(best_t,
                                                     bed_info,
                                                     params,