def create_city_and_serialize(city_name, scale, params_to_change):
    """
    Generate the population of a given city.
    Done once for each triplet (city, scale, params_to_change).
    :param city_name: str city name, "all" for the entire country
    :param scale: float between 0-1 that states the size proportion of the city. 1 is the actual size
    :param params_to_change: dict of params to change in the Params object
    :return: World object
    """
    config_path = os.path.dirname(__file__) + "/config.json"
    with open(config_path) as json_data_file:
        ConfigData = json.load(json_data_file)
        citiesDataPath = ConfigData['CitiesFilePath']
        paramsDataPath = ConfigData['ParamsFilePath']

    Params.load_from(os.path.join(os.path.dirname(__file__), paramsDataPath),
                     override=True)
    # Apply the parameter overrides (if any) before generating the population
    if params_to_change:
        for param, val in params_to_change.items():
            Params.loader()[param] = val
        population_loader = PopulationLoader(
            citiesDataPath,
            added_description=Params.loader().description(),
            with_caching=False)
    else:
        population_loader = PopulationLoader(citiesDataPath,
                                             added_description="",
                                             with_caching=False)
    return population_loader.get_world(city_name=city_name, scale=scale)
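# Hedged usage sketch (not part of the original module): pre-generate a half-scale
# population for one city. "Atlit" and the empty override dict are illustrative
# choices; any city from the cities file and any params override dict work the same way.
def _example_create_city_and_serialize():  # pragma: no cover - illustrative only
    world = create_city_and_serialize("Atlit", 0.5, {})
    return world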
def __init__(self, age, environments=None):
    params = Params.loader()['population']
    R0 = params["R0_percent"]
    StartAsRecovered = False
    # if random.random() < R0:
    #     StartAsRecovered = True
    self._changed = True
    if not environments:
        environments = []
    self._age = age
    assert len(set([env.name for env in environments])) == len(environments), \
        "Got duplicate environment names"
    self._environments = {env.name: env for env in environments}
    self._current_routine = {env_name: 1 for env_name in self._environments}
    params = Params.loader()['person']
    self._infectiousness_prob = \
        min(params['base_infectiousness'] *
            _np.random.gamma(
                params['individual_infectiousness_gamma_shape'],
                params['individual_infectiousness_gamma_scale']
            ), 1)
    # if StartAsRecovered:
    #     self._disease_state = DiseaseState.IMMUNE
    #     self.is_susceptible = False
    #     self.is_infected = True
    # else:
    self._disease_state = DiseaseState.SUSCEPTIBLE
    self.is_susceptible = True
    self.is_dead = False
    self.is_infectious = False
    self.is_infected = False
    str_type = params['state_macine_type']
    assert str_type in ['SIRS', 'SIR']
    self.state_machine_type = machine_type[str_type]
    self._id = Person.num_people_so_far
    # Holds all the events that are triggered by some disease state change(s),
    # like isolation when symptomatic
    self.state_to_events = {}
    # The following counts the number of different interventions that force each routine change on this person.
    # For instance, I might be in quarantine because I'm old and because I'm symptomatic.
    # Without this counter, people could go into quarantine because they're old, get symptoms during quarantine,
    # then go out of quarantine when the symptoms pass.
    self.routine_change_multiplicities = {}
    self.routine_changes = {}
    self._infection_data = None
    # Table that corresponds to SEIR times and events, so they are easier to manage
    self._seir_times = None
    self._my_neighborhood = None
    self._num_infections = 0
    # if StartAsRecovered:
    #     self.last_state = RedactedPerson(self.get_age(), self.get_disease_state())
    # else:
    self.last_state = None
    Person.num_people_so_far += 1
def create_and_run_simulation(self, outdir, stop_early, with_population_caching=True, verbosity=False):
    """
    The main function that handles the run of the simulation by the task.
    It updates the parameter changes, loads or creates the population,
    initializes the simulation and runs it.
    :param outdir: the output directory for the task
    :param stop_early: only relevant to R computation, see Simulation doc
    :param with_population_caching: bool, if False generates the population,
    else - tries to use the cache and save time.
    :param verbosity: bool, if it's True then additional output logs will be printed to the screen
    """
    seed.set_random_seed()
    config_path = os.path.join(os.path.dirname(__file__), "config.json")
    with open(config_path) as json_data_file:
        ConfigData = json.load(json_data_file)
        citiesDataPath = ConfigData['CitiesFilePath']
        paramsDataPath = ConfigData['ParamsFilePath']
        Extensionslst = ConfigData['ExtensionsNamelst']

    Params.load_from(os.path.join(os.path.dirname(__file__), paramsDataPath),
                     override=True)
    for param, val in self.params_to_change.items():
        Params.loader()[param] = val
    DiseaseState.init_infectiousness_list()

    population_loader = PopulationLoader(
        citiesDataPath,
        added_description=Params.loader().description(),
        with_caching=with_population_caching,
        verbosity=verbosity)
    world = population_loader.get_world(city_name=self.city_name,
                                        scale=self.scale,
                                        is_smart=True)

    sim = Simulation(world, self.initial_date, self.interventions,
                     verbosity=verbosity, outdir=outdir, stop_early=stop_early)
    self.infection_params.infect_simulation(sim, outdir)
    if len(Extensionslst) > 0:
        sim.run_simulation(self.days, self.scenario_name,
                           datas_to_plot=self.datas_to_plot,
                           extensionsList=Extensionslst)
    else:
        sim.run_simulation(self.days, self.scenario_name,
                           datas_to_plot=self.datas_to_plot,
                           extensionsList=None)
def __init__(self):
    params = Params.loader()['disease_parameters']
    self.symptomatic_given_infected_per_age = params[
        "symptomatic_given_infected_per_age"]
    self.hospitalization_given_symptomatic_per_age = params[
        "hospitalization_given_symptomatic_per_age"]
    self.critical_given_hospitalized_per_age = params[
        "critical_given_hospitalized_per_age"]
    self.deceased_given_critical_per_age = params[
        "deceased_given_critical_per_age"]
    self._latent_period_distribution = self.generate_gamma_distribution(
        params["latent_period_gamma_params"])
    self._infectious_before_symptomatic_distribution = self.generate_gamma_distribution(
        params["infectious_before_symptomatic_gamma_params"])
    self._infectious_before_immune_distribution = self.generate_gamma_distribution(
        params["infectious_before_immune_gamma_params"])
    self._symptomatic_before_critical_distribution = self.generate_gamma_distribution(
        params["symptomatic_before_critical_gamma_params"])
    self._symptomatic_before_immune_distribution = self.generate_gamma_distribution(
        params["symptomatic_before_immune_gamma_params"])
    self._critical_before_deceased_distribution = self.generate_gamma_distribution(
        params["critical_before_deceased_gamma_params"])
    self._critical_before_immune_distribution = self.generate_gamma_distribution(
        params["critical_before_immune_gamma_params"])
def test_SIRS_second_infection():
    """
    Test that in the SIRS model, when a person gets sick twice
    (recovering in between), they experience two different illness time schedules.
    """
    # Pretest
    Params.clean()
    SIRS.clean()
    config_path = os.path.join(
        os.path.dirname(__file__), "tests_config_files",
        "test_latent_incubating_critical_immune_config.json")
    with open(config_path) as json_data_file:
        ConfigData = json.load(json_data_file)
        paramsDataPath = ConfigData['ParamsFilePath']
    Params.load_from(os.path.join(os.path.dirname(__file__),
                                  "tests_params_files", paramsDataPath),
                     override=True)
    Params.loader()["person"]["state_macine_type"] = "SIRS"

    p = Person(30)
    event_lst = p.infect_and_get_events(INITIAL_DATE, InitialGroup.initial_group())
    p.set_disease_state(DiseaseState.SUSCEPTIBLE)
    event_lst2 = p.infect_and_get_events(INITIAL_DATE, InitialGroup.initial_group())
    assert not (event_lst == event_lst2)
def city_curfew_routine(person: Person, city_name):
    """
    Create a routine change that represents the effect of a city curfew on a person's routine.
    Here we try to represent the change (decrease/increase) of the weights in all the
    environments, due to the closure of the city.
    :param person: Person
    :param city_name: the name of the city under curfew
    :return: routine change dict, keys are environment names, values are weight multipliers.
    """
    params = Params.loader()["interventions_routines"]["city_curfew"]
    routine = {}
    has_free_time = False
    for env_name, env in person._environments.items():
        if env._city is None:
            continue
        # Only environments that straddle the city border are affected:
        # either the environment is in the curfewed city and the person lives elsewhere,
        # or the person lives in the curfewed city and the environment is elsewhere.
        if not ((env._city.get_name() == city_name) ^ (person.get_city_name() == city_name)):
            continue
        routine[env_name] = params["out_of_city"]
        has_free_time = True
    if has_free_time:
        routine['household'] = params["in_city"]["household"]
        routine['city_community'] = params["in_city"]["city_community"]
        routine['neighborhood_community'] = params["in_city"]["neighborhood_community"]
    return routine
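# Hedged illustration (not part of the original module): the XOR in city_curfew_routine
# gives the "out_of_city" multiplier only to cross-border environments. The city names
# below are hypothetical; the logic mirrors the condition above.
def _example_city_curfew_cross_border():  # pragma: no cover - illustrative only
    curfew_city = "Haifa"
    for env_city, home_city in [("Haifa", "Atlit"), ("Atlit", "Haifa"),
                                ("Atlit", "Atlit"), ("Haifa", "Haifa")]:
        affected = (env_city == curfew_city) ^ (home_city == curfew_city)
        print(env_city, home_city, "affected:", affected)  # True, True, False, False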
def quarantine_routine(person: Person):
    """
    Create a routine change that represents a person being in quarantine.
    Here we try to represent the change (decrease) of the weights in all the
    environments, due to the quarantine.
    :param person: Person
    :return: routine change dict, keys are environment names, values are weight multipliers.
    """
    params = Params.loader()["interventions_routines"]["quarantine"]
    return {env_name: params["all"] for env_name in person.get_routine()}
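# Hedged usage sketch (not part of the original module): every environment in the
# person's routine receives the same multiplier, taken from
# interventions_routines.quarantine.all in the params file. Assumes Params is loaded.
def _example_quarantine_routine():  # pragma: no cover - illustrative only
    person = Person(30)
    change = quarantine_routine(person)
    assert set(change) == set(person.get_routine())
    return change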
def write_params(self):
    """
    Write a params.json file corresponding to the parameters used
    to the output directory (for documentation purposes)
    """
    params_path = os.path.join(self._output_path, 'params.json')
    params = Params.loader()
    assert params == self._params_at_init, "Params changed mid-simulation!"
    params.dump(params_path)
def assert_params_are_valid(params):
    """
    Makes sure the params that appear in a serialized file are the same as the
    current ones we are using (otherwise there will be compatibility errors)
    :param params: The params that should be checked against the current ones
    :return: None (throws an exception if they do not match)
    """
    assert params == Params.loader(), \
        "Trying to load a file corresponding to different params!"
def _save_on_me(self, world, is_smart):
    """
    Saves a generated World to a local dict on this object
    :param world: The World we wish to save
    :param is_smart: Was the world generated using smart world generation
    :return: None
    """
    tup = (world._generating_city_name, is_smart, world._generating_scale)
    assert tup not in self.city_smart_scale_to_world, "Dumping an existent city"
    self.city_smart_scale_to_world[tup] = (world, Params.loader())
def generate_entire_country(city_list, is_smart_household_generation=True, scaling=1.0, verbosity=False):
    """
    Generates the World of a model of the entire country,
    in the same way as was detailed in the specification document.
    :param city_list: The list of cities which comprise this model
    (workplace_city_distribution should already be initialized)
    :param is_smart_household_generation: Are we using smart_population_generation or
    naive_population_generation. This should only be False for testing/debugging purposes.
    :param scaling: The scale that should multiply all of the cities involved.
    :param verbosity: Whether or not we should print debug information.
    :return: A World object corresponding to a model of all of these cities.
    """
    all_people = []
    all_environments = []
    city_to_workers = {city: [] for city in city_list}
    workplace_node = None
    city_env_params = Params.loader()['city_environments']
    for env_params in city_env_params:
        if env_params["env_name"] == 'workplace':
            workplace_node = load_cross_environment_data_from_json(env_params)
    assert workplace_node is not None, "Could not find workplace in params.json!"
    for city in city_list:
        city_people, city_environments = generate_city(
            city,
            is_smart_household_generation=is_smart_household_generation,
            internal_workplaces=False,
            scaling=scaling,
            verbosity=verbosity,
            to_world=False)
        for person in city_people:
            all_people.append(person)
            if workplace_node.age_segment[0] <= person.get_age() <= workplace_node.age_segment[1]:
                workplace_city = city.workplace_city_distribution.sample()
                city_to_workers[workplace_city].append(person)
        for environment in city_environments:
            all_environments.append(environment)
    for city, workers in city_to_workers.items():
        workplaces_in_city, people_segments = divide_people_to_environments(
            workers,
            workplace_node.size,
            Workplace,
            city,
            workplace_node.average_daily_contacts,
            age_segment=workplace_node.age_segment,
            name=workplace_node.env_name)
        for workplace in workplaces_in_city:
            all_environments.append(workplace)
    return World(all_people, all_environments, 'all', scaling)
def _save_to_file(self, world, is_smart):
    """
    Serializes and saves a generated World to a file
    :param world: The World to save
    :param is_smart: Was the world generated using smart world generation
    :return: None
    """
    filepath = self._get_filepath(world._generating_city_name, is_smart,
                                  world._generating_scale)
    assert not os.path.exists(filepath), "File '%s' already exists!" % filepath
    log.info("Saving the new results to {} ...".format(filepath))
    if not os.path.exists(self.output_dir):
        os.makedirs(self.output_dir)
    with open(filepath, 'wb') as f:
        pickle.dump((world, Params.loader()), f)
def get_infectiousness_map(cls):
    if cls.infectiousness_factors is None:
        factors = Params.loader()["disease_parameters"]["infectiousness_per_stage"]
        cls.infectiousness_factors = {
            DiseaseState.INCUBATINGPOSTLATENT: factors["incubating_post_latent"],
            DiseaseState.ASYMPTOMATICINFECTIOUS: factors["asymptomatic"],
            DiseaseState.SYMPTOMATICINFECTIOUS: factors["symptomatic"],
            DiseaseState.CRITICAL: factors["critical"]
        }
    return cls.infectiousness_factors
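# Hedged usage sketch (not part of the original module): the map is built lazily on
# the first call and cached on the class, so repeated calls return the same dict.
# Calling it through DiseaseState is an assumption based on how
# DiseaseState.init_infectiousness_list() is used elsewhere in this codebase.
def _example_infectiousness_map():  # pragma: no cover - illustrative only
    first = DiseaseState.get_infectiousness_map()
    second = DiseaseState.get_infectiousness_map()
    assert first is second
    return first[DiseaseState.CRITICAL]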
def household_isolation_routine(person: Person):
    """
    Create a routine change that represents a person being in isolation at home.
    Here we try to represent the change (decrease/increase) of the weights in all the
    environments, due to the person staying at home.
    :param person: Person
    :return: routine change dict, keys are environment names, values are weight multipliers.
    """
    params = Params.loader()["interventions_routines"]["household_isolation"]
    routine = {}
    for env_name in person.get_routine():
        if env_name == 'household':
            routine['household'] = params["household"]
        else:
            routine[env_name] = params["other"]
    return routine
def workplace_closure_routine(person: Person):
    """
    Create a routine change that represents a closure of the person's workplace.
    Here we try to represent the change of the weight in the workplace environment,
    as well as in other environments, due to the closure.
    :param person: Person
    :return: routine change dict, keys are environment names, values are weight multipliers.
    """
    params = Params.loader()["interventions_routines"]["workplace_closure"]
    routine = {}
    for env_name in person.get_routine():
        if env_name == 'workplace':
            routine['workplace'] = params["workplace"]
        else:
            routine[env_name] = params["other"]
    return routine
def generate_all_cities_for_jobs(jobs, cpus_to_use):
    """
    Generate all the populations needed before the jobs start to run, and serialize them.
    That way, the multiprocessed runs don't try to generate the population at the same time,
    and can use the cached results.
    :param jobs: all the jobs to be run
    :param cpus_to_use: the number of cpu cores to use; if greater than 1, the run will be multiprocessed
    """
    appearing_cities = set((job.city_name, job.scale) for job in jobs)
    appearing_cities_to_params = {city: [] for city in appearing_cities}
    for job in jobs:
        appearing_cities_to_params[(job.city_name, job.scale)] += job.get_all_params_changes()
    for city in appearing_cities:
        appearing_cities_to_params[city] = list(
            map(dict,
                set(tuple(sorted(d.items()))
                    for d in appearing_cities_to_params[city])))
    appearing_cities_and_params = [
        (city_name, scale, params_to_change)
        for city_name, scale in appearing_cities
        for params_to_change in appearing_cities_to_params[(city_name, scale)]
    ]
    print("Generating all cities...")
    if cpus_to_use == 1:
        for city_name, scale, params_to_change in appearing_cities_and_params:
            create_city_and_serialize(city_name, scale, params_to_change)
    else:
        ctx = mp.get_context("spawn")
        pool = ctx.Pool(cpus_to_use)
        futures = []
        for city_name, scale, params_to_change in appearing_cities_and_params:
            futures.append(
                pool.apply_async(create_city_and_serialize,
                                 args=(city_name, scale, params_to_change)))
        pool.close()
        pool.join()
        for future in futures:
            future.get()
    print("Done generating cities.")
def social_distancing_routine(person: Person):
    """
    Create a routine change that represents the effect of social distancing on a person's routine.
    Here we try to represent the change (decrease/increase) of the weights in all the
    environments, due to the social distancing.
    :param person: Person
    :return: routine change dict, keys are environment names, values are weight multipliers.
    """
    params = Params.loader()["interventions_routines"]["social_distancing"]
    routine = {}
    for env_name in person.get_routine():
        if env_name == 'household':
            routine['household'] = params["household"]
        elif env_name == 'workplace':
            routine['workplace'] = params["workplace"]
        elif env_name == 'school':
            routine[env_name] = params["school"]
        else:
            routine[env_name] = params["other"]
    return routine
def generate_all_households_and_communities_of_city_naive(city, scaling=1.0):
    """
    Generates all households of some city and divides them into neighborhoods.
    :param city: A City object corresponding to where we want to generate a population.
    :param scaling: A factor to multiply city.population by (in order to simulate toy examples)
    :return: A tuple of 3 lists:
    (a list of all generated people (Person objects),
    a list of all generated neighborhoods (lists of Person objects),
    a list of all generated environments (NeighborhoodCommunity objects))
    """
    # Amit confirmed that we should always use this class
    # warnings.warn("DEPRECATED CODE! naive city generation should not be used!", DeprecationWarning)
    assert isinstance(city, City)
    curr_community = []
    all_people = []
    all_communities = []
    all_environments = []
    min_num_people = int(city.population * scaling)
    params = Params.loader()['population']
    while len(all_people) < min_num_people:
        curr_household = generate_household_in_city(city)
        curr_household_env = Household(
            city,
            params['household_avg_daily_contacts'] / (max(len(curr_household) - 1, 1)))
        all_environments.append(curr_household_env)
        for person in curr_household:
            all_people.append(person)
            person.add_environment(curr_household_env)
            curr_community.append(person)
        if (len(curr_community) >= params['community_approx_size']) or (
                len(all_people) >= min_num_people):
            all_communities.append(curr_community)
            curr_community_env = NeighborhoodCommunity(
                city,
                params['community_avg_daily_contacts'] / float(max(len(curr_community), 1)))
            all_environments.append(curr_community_env)
            for person in curr_community:
                person.add_environment(curr_community_env)
            curr_community = []
    assert len(curr_community) == 0
    return all_people, all_communities, all_environments
def __init__(self, output_path, world):
    self._output_path = output_path
    if not os.path.isdir(output_path):
        os.mkdir(output_path)
    self._days_data = []
    self._final_state = None
    self._interventions = []
    self._r0_data = None
    self.num_infected = 0
    self.min_date = None
    self.max_date = None
    self.all_environment_names = set(
        [env._full_name for env in world.all_environments])
    self.all_environment_names.add('initial_group')
    self.full_env_name_to_short_env_name = {
        'initial_group': 'initial_group'
    }
    for env in world.all_environments:
        if env._full_name in self.full_env_name_to_short_env_name:
            assert self.full_env_name_to_short_env_name[env._full_name] == env.name
        self.full_env_name_to_short_env_name[env._full_name] = env.name
    self._params_at_init = Params.loader()
def test_CreateDeltaFileAtlit(helpers):
    helpers.clean_outputs()
    config_path = os.path.join(os.path.dirname(__file__), "..", "src", "config.json")
    with open(config_path) as json_data_file:
        ConfigData = json.load(json_data_file)
        citiesDataPath = ConfigData['CitiesFilePath']
        paramsDataPath = ConfigData['ParamsFilePath']
    Params.load_from(os.path.join(os.path.dirname(__file__), "..", "src", paramsDataPath),
                     override=True)
    Params.loader()["person"]["state_macine_type"] = "SIR"
    DiseaseState.init_infectiousness_list()

    pop = population_loader.PopulationLoader(citiesDataPath)
    my_world = pop.get_world(city_name='Atlit', scale=1, is_smart=False)
    sim = Simulation(world=my_world, initial_date=INITIAL_DATE)
    sim.infect_random_set(num_infected=500, infection_doc="")
    sim.run_simulation(num_days=180, name="test")

    # assert events dictionary is not empty
    txt = sim.stats.get_state_stratified_summary_table(table_format=TableFormat.CSV)
    test_data = StringIO(txt)
    tbl = pd.read_csv(test_data)
    cnt_start = tbl.iloc[0, DiseaseState.SUSCEPTIBLE.value] + tbl.iloc[0, DiseaseState.LATENT.value]
    cnt_end = 0
    for i in range(len(tbl)):
        cnt_end = cnt_end + tbl.iloc[i, DiseaseState.IMMUNE.value] + tbl.iloc[i, DiseaseState.DECEASED.value]
    plug_number = len([
        p for p in sim._world.all_people()
        if p.get_disease_state() == DiseaseState.SUSCEPTIBLE
    ])
    assert cnt_start >= cnt_end + plug_number
def generate_city(city, is_smart_household_generation, internal_workplaces=True,
                  scaling=1.0, verbosity=False, to_world=True):
    """
    Generates the population of a city and divides them into environments
    (as described in the specification document).
    :param city: The City object corresponding to the city we wish to create
    :param is_smart_household_generation: Should we use smart_population_generation
    or naive_population_generation (this should always be smart, unless testing naive generation)
    :param internal_workplaces: Should the workplaces be generated (from their CrossEnvironmentData nodes)
    or should they not be generated (so they may be generated between different cities).
    This should be True when generating a model of a single city
    and False when generating a model for multiple cities.
    :param scaling: The scale we wish to multiply the city by (see the specification document for details)
    :param verbosity: Whether or not this should print debug data
    :param to_world: Should we return a World object or a pair (all_people, all_environments)?
    This should be True when generating a single city and False when generating multiple cities
    (so they may be added together before constructing World)
    :return: Either a World object or a pair (all_people, all_environments), depending on the to_world parameter
    """
    assert isinstance(city, City)
    if is_smart_household_generation:
        all_people, all_neighborhoods, all_environments = \
            generate_all_households_and_communities_of_city_smart(city, scaling, verbosity=verbosity)
    else:
        all_people, all_neighborhoods, all_environments = \
            generate_all_households_and_communities_of_city_naive(city, scaling)
    city_env_params = Params.loader()['city_environments']
    cross_environments = []
    for env_params in city_env_params:
        if (env_params["env_name"] == 'workplace') and not internal_workplaces:
            continue
        cross_environments.append(load_cross_environment_data_from_json(env_params))
    initial_division = {NEIGHBORHOOD: all_neighborhoods, CITY: [all_people]}
    new_environments = make_cross_environments(all_people, cross_environments, city, initial_division)
    for env in new_environments:
        all_environments.append(env)
    city_community = CityCommunity(
        city,
        Params.loader()['population']['city_avg_daily_contacts'] / float(len(all_people)))
    for environment in all_environments:
        city_community.add_environment(environment)
        environment.set_city_env(city_community)
    for person in all_people:
        person.add_environment(city_community)
    all_environments.append(city_community)
    if to_world:
        assert internal_workplaces
        return World(all_people, all_environments, city.english_name, scaling)
    assert not internal_workplaces
    return all_people, all_environments
def test_count_infected_in_hood():
    '''
    Test that we gather the right data about the infected persons in each neighborhood.
    '''
    config_path = os.path.join(os.path.dirname(__file__), "..", "src", "config.json")
    with open(config_path) as json_data_file:
        ConfigData = json.load(json_data_file)
        paramsDataPath = ConfigData['ParamsFilePath']
    Params.load_from(os.path.join(os.path.dirname(__file__), "..", "src", paramsDataPath),
                     override=True)
    Params.loader()["person"]["state_macine_type"] = "SIR"
    DiseaseState.init_infectiousness_list()

    # Create different environments
    house1 = Household(city=None, contact_prob_between_each_two_people=1)
    house2 = Household(city=None, contact_prob_between_each_two_people=1)
    house1Ages = [98, 95, 5]
    house2Ages = [94, 6]
    house1Lst = list(map(Person, house1Ages))
    house2Lst = list(map(Person, house2Ages))

    # Register people to the different environments
    house1.sign_up_for_today(house1Lst[0], 1)
    house1.sign_up_for_today(house1Lst[1], 1)
    house1.sign_up_for_today(house1Lst[2], 1)
    house2.sign_up_for_today(house2Lst[0], 1)
    house2.sign_up_for_today(house2Lst[1], 1)
    assert len(house1.get_people()) == 3
    assert len(house2.get_people()) == 2

    n1 = NeighborhoodCommunity(city=None, contact_prob_between_each_two_people=1)
    n2 = NeighborhoodCommunity(city=None, contact_prob_between_each_two_people=1)

    states_table1 = ((DiseaseState.LATENT, daysdelta(3)),
                     (DiseaseState.ASYMPTOMATICINFECTIOUS, daysdelta(3)),
                     (DiseaseState.IMMUNE, daysdelta(3)),
                     (DiseaseState.IMMUNE, None))
    states_table2 = ((DiseaseState.IMMUNE, daysdelta(3)),
                     (DiseaseState.IMMUNE, None))

    events_acc = []
    for person in house1.get_people():
        person.add_environment(n1)
        events = person.gen_and_register_events_from_seir_times(date=INITIAL_DATE, states_and_times=states_table1)
        events_acc += events
    for person in house2.get_people():
        person.add_environment(n2)
        events = person.gen_and_register_events_from_seir_times(date=INITIAL_DATE, states_and_times=states_table2)
        events_acc += events

    env_arr = [house1, house2, n1, n2]
    persons_arr = []
    persons_arr += house1Lst
    persons_arr += house2Lst

    my_world = World(
        all_people=persons_arr,
        all_environments=env_arr,
        generating_city_name="test",
        generating_scale=1)
    my_simulation = Simulation(world=my_world, initial_date=INITIAL_DATE)
    my_simulation.register_events(events_acc)

    for i in range(4):
        d1 = my_simulation.stats.get_neiborhood_data(INITIAL_DATE + daysdelta(i), n1.get_neighborhood_id())
        d2 = my_simulation.stats.get_neiborhood_data(INITIAL_DATE + daysdelta(i), n2.get_neighborhood_id())
        assert d1 == 0, "Day:" + str(i)
        assert d2 == 0, "Day:" + str(i)
        my_simulation.simulate_day()
    for i in range(3):
        d1 = my_simulation.stats.get_neiborhood_data(INITIAL_DATE + daysdelta(i + 3), n1.get_neighborhood_id())
        d2 = my_simulation.stats.get_neiborhood_data(INITIAL_DATE + daysdelta(i + 3), n2.get_neighborhood_id())
        assert d1 == 3, "Day:" + str(3 + i)
        assert d2 == 0, "Day:" + str(3 + i)
        my_simulation.simulate_day()
    for i in range(3):
        d1 = my_simulation.stats.get_neiborhood_data(INITIAL_DATE, n1.get_neighborhood_id())
        d2 = my_simulation.stats.get_neiborhood_data(INITIAL_DATE, n2.get_neighborhood_id())
        assert d1 == 0, "Day:" + str(6 + i)
        assert d2 == 0, "Day:" + str(6 + i)
        my_simulation.simulate_day()
def get_rescaled_symptomatic_probs(symptomatic_probs_scale):
    current_probs = Params.loader()['disease_parameters']['symptomatic_given_infected_per_age']
    return [min(1, symptomatic_probs_scale * t) for t in current_probs]
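# Hedged worked example (not part of the original module): each per-age probability
# is multiplied by the scale and clamped at 1. The input probabilities are made up.
def _example_rescaled_symptomatic_probs():  # pragma: no cover - illustrative only
    # With per-age probabilities [0.3, 0.6, 0.9] and a scale of 1.5,
    # the result is [0.45, 0.9, 1] (the last entry is clamped).
    return [min(1, 1.5 * t) for t in [0.3, 0.6, 0.9]]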
def school_isolation_intervention(person):
    params = Params.loader()["interventions_routines"]["school_isolation"]
    return {'school': params["school"],
            'household': params['household'],
            'city_community': params['city_community'],
            'neighborhood_community': params['neighborhood_community']}
def no_school_routine(person):
    params = Params.loader()["interventions_routines"]["school_closure"]
    return {'school': params["school"],
            'household': params['household'],
            'city_community': params['city_community'],
            'neighborhood_community': params['neighborhood_community']}
def params(params_path):
    Params.load_from(params_path)
    return Params.loader()
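# Hedged usage sketch (not part of the original module): if this helper is exposed as
# a pytest fixture (an assumption here), a test can request it by name and read
# parameter values directly. The key lookup mirrors keys used elsewhere in this code.
def _example_params_fixture_usage(params):  # pragma: no cover - illustrative only
    assert params['person']['base_infectiousness'] >= 0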
def generate_all_households_and_communities_of_city_smart(city, scaling=1.0, verbosity=False):
    """
    Generates all of the households of some city and divides them into neighborhoods.
    :param city: A City object corresponding to where we want to generate a population.
    :param scaling: A factor to multiply city.population by (in order to simulate toy examples)
    :param verbosity: If 'True', outputs some data about the distance between the requested
    and achieved distributions.
    :return: A tuple of 3 lists:
    (a list of all generated people (Person objects),
    a list of all generated neighborhoods (lists of Person objects),
    a list of all generated environments (NeighborhoodCommunity objects))
    """
    assert isinstance(city, City)
    all_people = []
    all_environments = []
    params = Params.loader()['population']
    if verbosity:
        print("Generating city '%s'" % city.english_name)

    # Calling the actual code that generates houses
    households_ages = sim_houses(
        city.age_data,
        city.household_size_data,
        city.percentage_of_households_with_65_plus,
        city.percentage_of_households_with_17_minus,
        int(city.total_households * scaling),
        int(city.population * scaling),
        city.percentage_of_households_with_single_parent,
        city.percentage_of_children_with_single_parent_in_household,
        city.kids_per_household_data)
    # Converting houses from "lists of ages" to "lists of Person objects"
    households = [list(map(Person, ha)) for ha in households_ages]
    # The following is here because the algorithm generates them in a very ordered way,
    # and we don't want it to cause skews
    # (this is redundant since the addition of divide_weighted_array, but better safe than sorry)
    random.shuffle(households)

    # Computing the distance between the requested and achieved age distribution
    all_ages_groups = []
    for household in households:
        for person in household:
            all_ages_groups.append(min(person.get_age() // 5, 15))
    age_distribution_dist, sampled_age_distribution = get_distribution_distance(
        all_ages_groups, [t[0] for t in city.age_data])
    if age_distribution_dist > 0.15:
        print("WARNING probabilities far from original in city '%s'" % city.english_name)

    # Computing the distance between the requested and achieved household size distribution
    all_household_sizes = list(
        map(lambda l: min(len(l), len(city.household_size_data) - 1), households_ages))
    household_size_distribution_dist, sampled_household_size_distribution = get_distribution_distance(
        all_household_sizes, city.household_size_data)
    if household_size_distribution_dist > 0.15:
        print("WARNING probabilities far from original in city '%s'" % city.english_name)

    num_houses_got = len(households_ages)
    num_people_got = len(all_ages_groups)
    if verbosity:
        print("Was looking for age distribution of")
        print([t[0] for t in city.age_data])
        print("And got")
        print(sampled_age_distribution)
        print("Age distribution distance of", age_distribution_dist)
        print("Was looking for")
        print(city.household_size_data)
        print("And got")
        print(sampled_household_size_distribution)
        print("Household size distribution distance of", household_size_distribution_dist)
        print("Relative error in number of houses of",
              num_houses_got / int(city.total_households * scaling) - 1)
        print("Was looking for", int(city.total_households * scaling))
        print("And got", num_houses_got)
        print("Relative error in number of people of",
              num_people_got / int(city.population * scaling) - 1)
        print("Was looking for", int(city.population * scaling))
        print("And got", num_people_got)
        print("----------------------------------------------------")

    # Divide households into neighborhoods
    all_neighborhood_households = divide_weighted_array(
        [(household, len(household)) for household in households],
        params['community_approx_size'])

    # Transfer the format to the requested output format
    all_neighborhoods = []
    for curr_neighborhood_households in all_neighborhood_households:
        curr_neighborhood = []
        for curr_household in curr_neighborhood_households:
            curr_household_env = Household(
                city,
                params['household_avg_daily_contacts'] / max((len(curr_household) - 1, 1)))
            all_environments.append(curr_household_env)
            for person in curr_household:
                all_people.append(person)
                curr_neighborhood.append(person)
                person.add_environment(curr_household_env)
        all_neighborhoods.append(curr_neighborhood)
        curr_community_env = NeighborhoodCommunity(
            city,
            params['community_avg_daily_contacts'] / float(max(len(curr_neighborhood), 1)))
        all_environments.append(curr_community_env)
        for person in curr_neighborhood:
            person.add_environment(curr_community_env)

    assert sum([len(neighborhood) for neighborhood in all_neighborhoods]) == len(all_people), \
        "Internal error in population generation"
    return all_people, all_neighborhoods, all_environments
def __init__(self):
    super().__init__()
    params = Params.loader()['disease_parameters']
    self._immuned_before_susceptible_distribution = self.generate_gamma_distribution(
        params["Immuned_before_susceptible_gamma_params"])