Example #1
    def __init__(self, smooth=False):

        # ========================================== #
        #           Hyper-parameter values to test
        # ========================================== #

        self.w_1 = [1, 2]
        self.w_2 = [1, 2]
        self.w_3 = [1, 2]
        self.w_4 = [1, 2]
        self.w_5 = [1, 2]

        # Binomial smoother
        self.binom_smoother = [2, 3, 4, 5]

        # Optimizer to choose:
        self.optimizer = 'LBFGSB'

        # Step size (if LBFGSB)
        self.step_size = [0.1, 0.05, 0.01]

        self.model = SEIR.SEIR()

        self.smoothing = smooth
        self.model.smoothing = self.smoothing
        self.model.import_dataset()
        self.model.fit_type = 'type_1'
Example #2
    def __init__(self, smooth=False):

        # ========================================== #
        #           Hyper-parameter values to test
        # ========================================== #

        self.w_1 = [1, 2, 3]
        self.w_2 = [1, 2, 3]
        self.w_3 = [1, 2, 3]
        self.w_4 = [1, 2, 3]
        self.w_5 = [1, 2, 3]
        self.w_6 = [1, 2, 3]

        self.smoothing = smooth

        # Binomial smoother
        self.b_s_1 = [1, 2, 3, 4, 5, 6, 7, 8]
        self.b_s_2 = [1, 2, 3, 4, 5, 6, 7, 8]
        self.b_s_3 = [1, 2, 3, 4, 5, 6, 7, 8]
        self.b_s_4 = [1, 2, 3, 4, 5, 6, 7, 8]
        self.b_s_5 = [1, 2, 3, 4, 5, 6, 7, 8]
        self.b_s_6 = [1, 2, 3, 4, 5, 6, 7, 8]

        # Optimizers to choose from: 0 = COBYLA, 1 = LBFGSB, 2 = AUTO
        self.optimizer = ['COBYLA', 'LBFGSB', 'AUTO']

        # Split into parts
        self.part = [[1, 2], [3, 4], [5, 6], [7, 8]]

        self.model = SEIR.SEIR()
        if self.smoothing:
            self.model.smoothing = True
        self.model.import_dataset()
        self.model.fit_type = 'type_1'
Example #3
    def step(self):
        # Exposed fraction of each sub-population, used to couple the populations
        ratios = [
            SEIR.count_exposed(population) / population.population_size
            for population in self.populations
        ]
        for population in self.populations:
            population.step()
        self.cross_influence(ratios)
Example #4
    'AR': 1 / 5,
    'YR': 1 / 5,
    'death rate': 0.01,
    'immunity period': None,
}


def get_metric(df):
    return df['Removed'].max()


p_values = np.linspace(0, 1, num=2)
removed_count = []
for p in p_values:
    # m_values = [5*x for x in range(1, 50)]
    # for m in m_values:
    logger.info("Starting simulation for m = " + str(p) + "\n")
    graph = nx.powerlaw_cluster_graph(model_parameters['population size'],
                                      100,
                                      p,
                                      seed=5)
    logger.info("Initialized graph")
    model = SEIR.Population(graph, model_parameters)
    model.run(50)
    df = model.get_data()
    removed_count.append(get_metric(df))

print(removed_count)
plt.plot(p_values, removed_count)
plt.show()
Example #5
populations = []
model_parameters = {
    'population size': 10000,
    'initial outbreak size': 100,
    'alpha': 0.7,
    'spread_chance': 0.005,
    'EAY': 1 / 5,
    'AR': 1 / 5,
    'YR': 1 / 5,
    'death rate': 0,
    'immunity period': 20
}

# Create the first population
graph = nx.powerlaw_cluster_graph(model_parameters['population size'], 100,
                                  0.01)
logger.info("Initialized graph")
model = SEIR.Population(graph, model_parameters)
populations.append(model)

# Make adjustments on the second population
graph_2 = nx.powerlaw_cluster_graph(model_parameters['population size'], 100,
                                    0.01)
model_2 = SEIR.Population(graph_2, model_parameters, social_distancing_func)
populations.append(model_2)

meta_population = MetaPopulation(populations, cross_influence_matrix)
meta_population.run(120)

meta_population.save_model("Models/meta_population_osc_control_lowimun")
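The snippet above uses social_distancing_func and cross_influence_matrix, which are defined elsewhere in the project. A minimal hypothetical sketch of what they could look like (the shapes and values below are assumptions, not the original definitions):

# Hypothetical coupling matrix for two sub-populations: entry [i][j] is how
# strongly population j's exposed fraction influences population i.
cross_influence_matrix = [[0.9, 0.1],
                          [0.1, 0.9]]

# Hypothetical social-distancing schedule, shaped like
# social_distancing_func_simple in Example #6: returns a contact-rate multiplier.
def social_distancing_func(t):
    return 0.5 if 20 < t < 60 else 1.0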
Example #6

def social_distancing_func_simple(t):
    if 15 < t < 25:
        return 0.4
    else:
        return 1


model_steps = 50
graph = nx.powerlaw_cluster_graph(model_parameters['population size'],
                                  100,
                                  0.01,
                                  seed=5)
logger.info("Initialized graph")
model = SEIR.Population(graph, model_parameters, social_distancing_func_simple)
model.run(model_steps)
df = model.datacollector.get_model_vars_dataframe()

# Second run without the social distancing function, for comparison
model2 = SEIR.Population(graph, model_parameters)
model2.run(model_steps)
df2 = model2.datacollector.get_model_vars_dataframe()

# df.plot() returns a matplotlib Axes, not a Figure, so display via plt.show()
ax = df.plot()
ax2 = df2.plot()
# df2.plot(ax=ax, grid=True, linestyle='--')  # alternative: overlay both runs on one Axes
plt.show()
# sdf_values = [social_distancing_func_simple(t) for t in range(0, model_steps)]
# plt.plot(sdf_values)
Example #7
    'population size': 10000,
    'initial outbreak size': 0,
    'alpha': 0.7,
    'spread_chance': 0.001,
    'EAY': 1 / 10,
    'AR': 1 / 10,
    'YR': 1 / 10,
    'death rate': 0,
    'immunity period': None
}

model_parameters = [model_parameters_0, model_parameters_1, model_parameters_2]
for mp in model_parameters:
    graph = nx.powerlaw_cluster_graph(mp['population size'], 100, 0.01)
    logger.info("Initialized graph")
    model = SEIR.Population(graph, mp)

    populations.append(model)

meta_population = MetaPopulation.MetaPopulation(populations,
                                                cross_influence_matrix)
meta_population.run(100)

fig, axs = plt.subplots(len(cross_influence_matrix))
dfs = [
    pop.datacollector.get_model_vars_dataframe()
    for pop in meta_population.populations
]
for ax, df in zip(axs, dfs):
    df.plot(ax=ax, grid=True)
    ax.set_title("Subpopulation")