Esempio n. 1
0
    # --- Training hyperparameters for the bundle SIR network ---
    # NOTE(review): this fragment starts mid-scope; i_0_set, r_0_set, betas,
    # gammas, initial_conditions_set and t_final are defined above this view.
    train_size = 2000  # samples per training epoch
    decay = 0.0  # weight decay (disabled here)
    hack_trivial = False  # presumably discourages the trivial solution — TODO confirm semantics
    epochs = 3000
    lr = 8e-4  # Adam learning rate

    # How many solutions I want to generate
    n_draws = 10

    # How many times I want to fit a single trajectory, getting the best result
    n_trials = 3

    fit_epochs = 1000  # epochs used for each trajectory fit

    # Init model
    # NOTE(review): input=5 presumably corresponds to (t, i_0, r_0, beta, gamma) — verify
    sir = SIRNetwork(input=5, layers=4, hidden=50)

    # Checkpoint filename encodes the bundles, so each configuration maps to one file
    model_name = 'i_0={}_r_0={}_betas={}_gammas={}.pt'.format(i_0_set, r_0_set,
                                                              betas,
                                                              gammas)

    try:
        # It tries to load the model, otherwise it trains it
        checkpoint = torch.load(
            ROOT_DIR + '/models/SIR_bundle_total/{}'.format(model_name))
    except FileNotFoundError:
        # Train
        optimizer = torch.optim.Adam(sir.parameters(), lr=lr)
        writer = SummaryWriter(
            'runs/' + '{}'.format(model_name))
        # NOTE(review): the call below is truncated in the visible source;
        # its remaining keyword arguments continue past this fragment.
        sir, train_losses, run_time, optimizer = train_bundle(sir, initial_conditions_set, t_final=t_final,
import matplotlib.pyplot as plt

if __name__ == '__main__':
    # Source bundles: intervals for the initial susceptible fraction and the
    # SIR rate parameters used to train the "source" network.
    source_s_0_bundle = [0.995, 1.0]
    source_beta_bundle = [0.72, 0.88]
    source_gamma_bundle = [0.2, 0.35]
    source_sigma = 0.0  # noise level baked into the checkpoint name

    initial_conditions_set = []
    t_0 = 0
    t_final = 20
    initial_conditions_set.append(t_0)
    initial_conditions_set.append(source_s_0_bundle)

    # Init model
    # NOTE(review): input=4 here (vs input=5 in other examples) — presumably (t, s_0, beta, gamma); verify
    sir = SIRNetwork(input=4, layers=4, hidden=50)
    lr = 8e-4  # Adam learning rate

    try:
        # It tries to load the model, otherwise it trains it
        checkpoint = torch.load(ROOT_DIR + '/models/SIR_bundle_total/b_s_0={}'
                                '_betas={}_gammas={}_noise_{}.pt'.format(
                                    source_s_0_bundle, source_beta_bundle,
                                    source_gamma_bundle, source_sigma))
    except FileNotFoundError:
        # Train
        optimizer = torch.optim.Adam(sir.parameters(), lr=lr)
        source_epochs = 5000
        source_hack_trivial = False
        source_train_size = 2500
        source_decay = 1e-4
    # NOTE(review): the lines below reference s_0_bundle / beta_bundle /
    # gamma_bundle, which are not defined in this visible fragment — the
    # snippet appears stitched from a different part of the original file.
    initial_conditions_set.append(t_0)
    initial_conditions_set.append(s_0_bundle)

    # Sanity check
    # A zero rate would make the SIR dynamics degenerate, hence the guard.
    assert 0 not in beta_bundle and 0 not in gamma_bundle

    # Model parameters
    train_size = 2000
    decay = 0.0
    hack_trivial = False
    epochs = 3000
    lr = 8e-4
    sigma = 0.0

    # Init model
    sir = SIRNetwork(input=4, layers=4, hidden=50)

    try:
        # It tries to load the model, otherwise it trains it
        checkpoint = torch.load(
            ROOT_DIR + '/models/SIR_bundle_total/b_s_0={}'
            '_betas={}_gammas={}_noise_{}.pt'.format(s_0_bundle, beta_bundle,
                                                     gamma_bundle, sigma))
    except FileNotFoundError:
        # Train
        optimizer = torch.optim.Adam(sir.parameters(), lr=lr)
        # NOTE(review): the TensorBoard run name ends in '.pt' (mirrors the
        # checkpoint name) — harmless but probably unintended.
        writer = SummaryWriter(
            'runs/' + 'b_s_0={}'
            '_betas={}_gammas={}_noise_{}.pt'.format(s_0_bundle, beta_bundle,
                                                     gamma_bundle, sigma))
        # NOTE(review): call truncated in the visible source; arguments continue beyond this line.
        sir, train_losses, run_time, optimizer = train_bundle_total(
Esempio n. 4
0
    # NOTE(review): fragment starts mid-scope; i_0_set, r_0_set, betas, gammas,
    # initial_conditions_set and t_final are defined above this view.
    initial_conditions_set.append(i_0_set)
    initial_conditions_set.append(r_0_set)

    # How many times I want to fit the trajectory, getting the best result
    n_trials = 1
    fit_epochs = 300  # epochs per trajectory fit

    # Model parameters
    train_size = 2000  # samples per training epoch
    decay = 1e-4  # weight decay (L2 regularization)
    hack_trivial = False  # presumably discourages the trivial solution — TODO confirm
    epochs = 3000
    lr = 8e-4  # Adam learning rate

    # Init model
    sir = SIRNetwork(input=5, layers=4, hidden=50)

    # Checkpoint filename encodes the bundles, so each configuration maps to one file
    model_name = 'i_0={}_r_0={}_betas={}_gammas={}.pt'.format(i_0_set, r_0_set,
                                                              betas,
                                                              gammas)

    try:
        # It tries to load the model, otherwise it trains it
        checkpoint = torch.load(
            ROOT_DIR + '/models/SIR_bundle_total/{}'.format(model_name))
    except FileNotFoundError:
        # Train
        optimizer = torch.optim.Adam(sir.parameters(), lr=lr)
        writer = SummaryWriter('runs/{}'.format(model_name))
        # NOTE(review): call truncated in the visible source; arguments continue beyond this line.
        sir, train_losses, run_time, optimizer = train_bundle(sir, initial_conditions_set, t_final=t_final,
                                                              epochs=epochs,
Esempio n. 5
0
if __name__ == '__main__':
    # File to apply finetuning on a pretrained model

    # Bundles (intervals) the source model was pretrained on: initial infected
    # and recovered fractions, and the SIR rate parameters.
    source_i_0_set = [0.1, 0.2]
    source_r_0_set = [0.1, 0.2]
    source_betas = [0.6, 0.8]
    source_gammas = [0.1, 0.2]

    initial_conditions_set = []
    t_0 = 0
    t_final = 20
    initial_conditions_set.append(t_0)
    initial_conditions_set.append(source_i_0_set)
    initial_conditions_set.append(source_r_0_set)
    # Init model
    sir = SIRNetwork(input=5, layers=4, hidden=50)
    lr = 8e-4  # Adam learning rate

    # Checkpoint name encodes the pretraining bundles
    source_model_name = 'i_0={}_r_0={}_betas={}_gammas={}.pt'.format(
        source_i_0_set, source_r_0_set, source_betas, source_gammas)

    try:
        # It tries to load the model, otherwise it trains it
        checkpoint = torch.load(
            ROOT_DIR + '/models/SIR_bundle_total/{}'.format(source_model_name))
    except FileNotFoundError:
        # Train
        optimizer = torch.optim.Adam(sir.parameters(), lr=lr)
        source_epochs = 20000
        # NOTE(review): 0 used here where other examples use False for the same
        # flag — equivalent under truthiness, but inconsistent.
        source_hack_trivial = 0
        # NOTE(review): fragment is truncated here; the training call that
        # consumes these source_* settings lies beyond this view.
        source_train_size = 2000
Esempio n. 6
0
    # NOTE(review): fragment starts mid-scope; t_0, t_final, i_0_set, r_0_set
    # and betas are defined above this view.
    gammas = [0.4, 0.7]

    # Model parameters
    initial_conditions_set = []
    initial_conditions_set.append(t_0)
    initial_conditions_set.append(i_0_set)
    initial_conditions_set.append(r_0_set)

    train_size = 1000  # samples per training epoch
    decay = 1e-3  # weight decay (L2 regularization)
    hack_trivial = 0  # presumably discourages the trivial solution — TODO confirm
    epochs = 1000
    lr = 8e-4  # Adam learning rate

    # Init model
    # NOTE(review): output=3 is explicit here (S, I, R heads?) unlike the other
    # examples, which rely on the constructor default — verify they agree.
    sir = SIRNetwork(input=5, layers=4, hidden=50, output=3)

    # Checkpoint filename encodes the bundles, so each configuration maps to one file
    model_name = 'i_0={}_r_0={}_betas={}_gammas={}.pt'.format(
        i_0_set, r_0_set, betas, gammas)
    try:
        # It tries to load the model, otherwise it trains it
        checkpoint = torch.load(
            ROOT_DIR + '/models/SIR_bundle_total/{}'.format(model_name))
    except FileNotFoundError:
        # Train
        optimizer = torch.optim.Adam(sir.parameters(), lr=lr)
        writer = SummaryWriter('runs/{}'.format(model_name))
        # NOTE(review): call truncated in the visible source; arguments continue beyond this fragment.
        sir, train_losses, run_time, optimizer = train_bundle(
            sir,
            initial_conditions_set,
            t_final=t_final,
Esempio n. 7
0
            # NOTE(review): fragment starts mid-scope, inside a try block whose
            # opening (and the first CSV read populating log_params_losses) is
            # above this view. Flatten rows-of-cells into a flat float list.
            log_params_losses = [
                item for sublist in log_params_losses for item in sublist
            ]
            log_params_losses = [float(x) for x in log_params_losses]

        # NOTE(review): backslash path is Windows-only; os.path.join or pathlib
        # would be portable — flagged, not changed.
        with open('csv\\log_inits_losses.csv', newline='') as f:
            reader = csv.reader(f)
            log_inits_losses = list(reader)
            # Flatten rows-of-cells into a flat float list.
            log_inits_losses = [
                item for sublist in log_inits_losses for item in sublist
            ]
            log_inits_losses = [float(x) for x in log_inits_losses]

    # NOTE(review): bare except swallows every exception type (including
    # KeyboardInterrupt); the intent is presumably "cached CSVs missing, so
    # recompute" — should be narrowed to FileNotFoundError. Flagged, not changed.
    except:
        # Init model
        sir = SIRNetwork(input=5, layers=4, hidden=50)

        # Load every saved checkpoint (1..max_model_index) into its own network.
        models = []
        for model_index in range(1, max_model_index + 1, 1):
            model_name = '({})_i_0={}_r_0={}_betas={}_gammas={}.pt'.format(
                model_index, i_0_set, r_0_set, betas, gammas)
            # Init model
            # A fresh network per checkpoint, so each entry in models is independent.
            sir = SIRNetwork(input=5, layers=4, hidden=50)
            checkpoint = torch.load(
                ROOT_DIR + '/models/SIR_bundle_total/{}'.format(model_name))
            # Load the model
            sir.load_state_dict(checkpoint['model_state_dict'])
            models.append(sir)

        n_draws = 300
        steps = 20
    # NOTE(review): exact float equality — fragile if the fractions were
    # computed rather than chosen; math.isclose would be safer. Flagged only.
    assert i_0 + s_0 + r_0 == rescaling_factor

    # Model parameters
    t_final = 20
    train_size = 2500  # samples per training epoch
    decay = 0.0  # weight decay (disabled)
    hack_trivial = False  # presumably discourages the trivial solution — TODO confirm
    epochs = 1000
    lr = 8e-4  # Adam learning rate

    # Scipy solver solution
    # NOTE(review): linspace(0, t_final, t_final) yields only t_final (=20)
    # sample points — a coarse grid; confirm this resolution is intended.
    t = np.linspace(0, t_final, t_final)
    s_p, i_p, r_p = SIR_solution(t, s_0, i_0, r_0, beta, gamma)

    # Init model
    sir = SIRNetwork(layers=2, hidden=50)

    try:
        # It tries to load the model, otherwise it trains it
        checkpoint = torch.load(
            ROOT_DIR + '/models/SIR/s_0={:.2f}-i_0={:.2f}-r_0={:.2f}'
                       '-t_0={}-t_f={:.2f}_beta={}_gamma={}.pt'.format(s_0,
                                                                       i_0, r_0,
                                                                       initial_conditions[0],
                                                                       t_final, beta,
                                                                       gamma))
    except FileNotFoundError:
        # Train
        optimizer = torch.optim.Adam(sir.parameters(), lr=lr)
        # NOTE(review): call truncated in the visible source; the format
        # arguments and the rest of the training branch continue past this line.
        writer = SummaryWriter(
            'runs/' + 's_0={:.2f}-i_0={:.2f}-r_0={:.2f}-t_0={:.2f}-t_f={:.2f}_beta={}_gamma={}.pt'.format(s_0,