def generate_camp():
    world = generate_empty_world({"region": ["CXB-219"]})
    populate_world(world)
    # distribute people to households
    distribute_people_to_households(world)
    # medical facilities
    hospitals = Hospitals.from_file(
        filename=camp_data_path / "input/hospitals/hospitals.csv"
    )
    world.hospitals = hospitals
    for hospital in world.hospitals:
        hospital.area = world.areas.members[0]
    hospital_distributor = HospitalDistributor(
        hospitals, medic_min_age=20, patients_per_medic=10
    )
    world.isolation_units = IsolationUnits([IsolationUnit(area=world.areas[0])])
    hospital_distributor.distribute_medics_from_world(world.people)
    world.learning_centers = LearningCenters.for_areas(world.areas, n_shifts=4)
    world.pump_latrines = PumpLatrines.for_areas(world.areas)
    world.play_groups = PlayGroups.for_areas(world.areas)
    world.distribution_centers = DistributionCenters.for_areas(world.areas)
    world.communals = Communals.for_areas(world.areas)
    world.female_communals = FemaleCommunals.for_areas(world.areas)
    world.religiouss = Religiouss.for_areas(world.areas)
    world.e_vouchers = EVouchers.for_areas(world.areas)
    world.n_f_distribution_centers = NFDistributionCenters.for_areas(world.areas)
    world.shelters = Shelters.for_areas(world.areas)
    world.cemeteries = Cemeteries()
    # proportion of families that share a shelter
    shelter_distributor = ShelterDistributor(sharing_shelter_ratio=0.75)
    for area in world.areas:
        shelter_distributor.distribute_people_in_shelters(area.shelters, area.households)
    return world
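# Usage sketch (assumption): generate_camp() returns a fully populated camp world, so it
# can be saved to HDF5 in the same way as the UK worlds below. The "camp.hdf5" filename
# and the to_hdf5 call are assumed to follow the quickstart pattern further down.
if __name__ == "__main__":
    camp_world = generate_camp()
    print("Camp world length", len(camp_world.people))
    camp_world.to_hdf5("camp.hdf5")  # assumed: same World.to_hdf5 call as in the quickstart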
def test__social_distancing(self, setup_policy_world):
    world, pupil, student, worker, sim = setup_policy_world
    world.cemeteries = Cemeteries()
    start_date = datetime(2020, 3, 10)
    end_date = datetime(2020, 3, 12)
    beta_factors = {
        "box": 0.5,
        "pub": 0.5,
        "grocery": 0.5,
        "cinema": 0.5,
        "commute_unit": 0.5,
        "commute_city_unit": 0.5,
        "hospital": 0.5,
        "care_home": 0.5,
        "company": 0.5,
        "school": 0.5,
        "household": 1.0,
        "university": 0.5,
    }
    social_distance = SocialDistancing(
        start_time="2020-03-10", end_time="2020-03-12", beta_factors=beta_factors
    )
    beta_factors2 = {"cinema": 4}
    start_date2 = datetime(2020, 3, 12)
    end_date2 = datetime(2020, 3, 15)
    social_distance2 = SocialDistancing(
        start_time="2020-03-12", end_time="2020-03-15", beta_factors=beta_factors2
    )
    policies = Policies([social_distance, social_distance2])
    leisure_instance = leisure.generate_leisure_for_config(
        world=world, config_filename=test_config
    )
    leisure_instance.distribute_social_venues_to_households(world.households)
    sim.activity_manager.policies = policies
    sim.activity_manager.leisure = leisure_instance
    sim.timer.reset()
    initial_betas = copy.deepcopy(sim.interaction.beta)
    sim.clear_world()
    while sim.timer.date <= sim.timer.final_date:
        sim.do_timestep()
        if sim.timer.date >= start_date and sim.timer.date < end_date:
            # first policy window: every beta except household is halved
            for group in sim.interaction.beta:
                if group != "household":
                    assert sim.interaction.beta[group] == initial_betas[group] * 0.5
                else:
                    assert sim.interaction.beta[group] == initial_betas[group]
            next(sim.timer)
            continue
        if sim.timer.date >= start_date2 and sim.timer.date < end_date2:
            # second policy window: only the cinema beta is scaled (by 4), the rest stay unchanged
            for group in sim.interaction.beta:
                if group == "cinema":
                    assert sim.interaction.beta[group] == initial_betas[group] * 4
                else:
                    assert sim.interaction.beta[group] == initial_betas[group]
            next(sim.timer)
            continue
        # outside both policy windows the betas return to their initial values
        assert sim.interaction.beta == initial_betas
        next(sim.timer)
def simulation(args):
    gf.print_flush(args)

    print("Physical cores:", psutil.cpu_count(logical=False))
    print("Total cores:", psutil.cpu_count(logical=True))
    print("=" * 20, "Memory Information", "=" * 20)
    # get the memory details
    svmem = psutil.virtual_memory()
    print(f"Total: {get_size(svmem.total)}")
    print(f"Available: {get_size(svmem.available)}")
    print(f"Used: {get_size(svmem.used)}")
    print(f"Percentage: {svmem.percent}%")
    pid = os.getpid()
    py = psutil.Process(pid)
    memoryUse = py.memory_info()[0]

    # initialise world from file
    gf.print_flush("Initialising world...")
    world_file = "{}.hdf5".format(args.world)
    world = generate_world_from_hdf5(world_file, chunk_size=1_000_000)
    gf.print_flush("World loaded successfully...")
    geography = load_geography_from_hdf5(world_file)

    # leisure
    gf.print_flush("Initialising leisure...")
    world.pubs = Pubs.for_geography(geography)
    world.cinemas = Cinemas.for_geography(geography)
    world.groceries = Groceries.for_super_areas(geography.super_areas)

    # cemeteries
    gf.print_flush("Initialising cemeteries...")
    world.cemeteries = Cemeteries()

    # commute
    gf.print_flush("Initialising commute...")
    world.initialise_commuting()

    # infection selector
    gf.print_flush("Selecting infection...")
    selector = InfectionSelector.from_file()
    interaction = ContactAveraging.from_file(selector=selector)
    lhs_array = np.load("lhs_array.npy")
    parameters = generate_parameters_from_lhs(lhs_array, args.idx)
    interaction = set_interaction_parameters(parameters, interaction)
    gf.print_flush(
        "Betas = {}, alpha = {}".format(interaction.beta, interaction.alpha_physical)
    )

    if not os.path.exists(SAVE_PATH):
        os.makedirs(SAVE_PATH)

    # save out parameters for later
    with open(SAVE_PATH + '/parameters.json', 'w') as f:
        json.dump(parameters, f)

    # seed infections
    seed = Seed.from_file(
        super_areas=world.super_areas,
        selector=selector,
    )

    print(f"Memory used by JUNE's world: {get_size(memoryUse)}")

    simulator = Simulator.from_file(
        world,
        interaction,
        selector,
        seed=seed,
        config_filename=CONFIG_PATH,
        save_path=SAVE_PATH,
    )
    simulator.run()

    # read = ReadLogger(SAVE_PATH)
    # world_df = read.world_summary()
    # ages_df = read.age_summary([0,10,20,30,40,
    #                             50,60,70,80,90,100])
    # loc_df = read.get_locations_infections()
    # r_df = read.get_r()
    # world_df.to_csv(SAVE_PATH + '/world_df.csv')
    # ages_df.to_csv(SAVE_PATH + '/ages_df.csv')
    # loc_df.to_csv(SAVE_PATH + '/loc_df.csv')
    # r_df.to_csv(SAVE_PATH + '/r_df.csv')

    gf.print_flush("Simulation finished!!!!")

    return None
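# Minimal driver sketch (assumption): the script above only reads `args.world` (HDF5
# world name without extension) and `args.idx` (index of the latin-hypercube sample), so
# an argparse wrapper along these lines is assumed to invoke it. Flag names are
# illustrative, not taken from the original.
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Run a JUNE simulation for one LHS sample")
    parser.add_argument("--world", help="name of the world HDF5 file, without the .hdf5 extension")
    parser.add_argument("--idx", type=int, help="index of the latin-hypercube sample to use")
    simulation(parser.parse_args())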
def simulation(args):
    gf.print_flush(args)

    msoaslist = [
        "E02005702", "E02005704", "E02005736", "E02005734", "E02001697",
        "E02001701", "E02001704", "E02001702", "E02001812", "E02001803",
        "E02001806", "E02001796", "E02001801", "E02001802", "E02001795",
        "E02001818", "E02001821", "E02001814", "E02001808", "E02001817",
        "E02001816", "E02001819", "E02001813", "E02001804", "E02001811",
        "E02001805", "E02001791", "E02001794", "E02001792", "E02004320",
        "E02004321", "E02004322", "E02004325", "E02004327", "E02004329",
        "E02004330", "E02004328", "E02001798", "E02001793", "E02005706",
        "E02002496", "E02002498", "E02002500", "E02002503", "E02002504",
        "E02002515", "E02002516", "E02006910", "E02002518", "E02002519",
        "E02002513", "E02002550", "E02002555", "E02002549", "E02002542",
        "E02002547", "E02002545", "E02002543", "E02002537", "E02002544",
        "E02002541", "E02002523", "E02002540", "E02002536", "E02002538",
        "E02002535", "E02006909", "E02002489", "E02002484", "E02002487",
        "E02002485", "E02002483", "E02002493", "E02002490", "E02002492",
        "E02002494", "E02002488", "E02002491", "E02004332", "E02002505",
        "E02002497", "E02002502", "E02006812", "E02002499", "E02002506",
        "E02006811", "E02002509", "E02002501", "E02002508", "E02002507",
        "E02002529", "E02002514", "E02002512",
    ]

    gf.print_flush("Generating world from msoalist...")

    geography = Geography.from_file({"msoa": msoaslist})
    print('memory % used:', psutil.virtual_memory()[2])

    geography.hospitals = Hospitals.for_geography(geography)
    geography.schools = Schools.for_geography(geography)
    geography.companies = Companies.for_geography(geography)
    geography.care_homes = CareHomes.for_geography(geography)
    demography = Demography.for_geography(geography)
    gf.print_flush("Geography and demography generated...")
    world = World(geography, demography, include_households=True, include_commute=False)
    gf.print_flush("World generated...")

    # leisure
    world.cinemas = Cinemas.for_geography(geography)
    world.pubs = Pubs.for_geography(geography)
    world.groceries = Groceries.for_super_areas(world.super_areas, venues_per_capita=1 / 500)
    gf.print_flush("Initialised leisure...")

    # commute
    world.initialise_commuting()
    gf.print_flush("Initialised commute...")

    # cemeteries
    world.cemeteries = Cemeteries()
    gf.print_flush("Initialised cemeteries...")

    # infection selector
    selector = InfectionSelector.from_file()
    interaction = ContactAveraging.from_file(selector=selector)
    gf.print_flush("Infection selected...")

    # define groups for betas
    groups = {
        "leisure": ['pub', 'grocery', 'cinema'],
        "commute": ['commute_unit', 'commute_city_unit', 'travel_unit'],
        "hospital": ['hospital'],
        "care_home": ['care_home'],
        "company": ['company'],
        "school": ['school'],
        "household": ['household'],
    }

    # define problem for latin hypercube sampling
    problem = {
        'num_vars': len(groups),
        'names': list(groups.keys()),
        'bounds': [[1, 2] for _ in range(len(groups))],
    }

    lhs = latin.sample(problem, N=args.num_runs, seed=1)[args.idx]

    betas = {}
    for i, key in enumerate(groups):
        for sub_key in groups[key]:
            betas[sub_key] = lhs[i]

    # save out betas for later
    with open(SAVE_PATH + '/betas.json', 'w') as f:
        json.dump(betas, f)

    # set betas in simulation to sampled ones
    for key in betas:
        interaction.beta[key] = betas[key]

    gf.print_flush(interaction.beta)

    # seed infections
    seed = Seed(
        world.super_areas,
        selector,
    )
    n_cases = int(len(world.people) / 10)
    seed.unleash_virus(n_cases)

    simulator = Simulator.from_file(
        world, interaction, selector, config_filename=CONFIG_PATH, save_path=SAVE_PATH
    )

    print('memory % used:', psutil.virtual_memory()[2])

    simulator.run()

    gf.print_flush("Simulation finished!!!!")

    return None
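# Follow-up sketch (assumption): the sampled betas are written to SAVE_PATH/betas.json
# above, so they can be read back afterwards to check which beta multipliers a given run
# used. SAVE_PATH and json are assumed to be available as in the surrounding script.
with open(SAVE_PATH + '/betas.json') as f:
    sampled_betas = json.load(f)
print("Sampled betas:", sampled_betas)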
world = generate_world_from_geography(
    geography, include_households=True, include_commute=True
)
print("World length", len(world.people))
world.to_hdf5("world.hdf5")
world = generate_world_from_hdf5("world.hdf5")

# leisure
geography = load_geography_from_hdf5("world.hdf5")
world.cinemas = Cinemas.for_geography(geography)
world.pubs = Pubs.for_geography(geography)
world.groceries = Groceries.for_super_areas(world.super_areas, venues_per_capita=1 / 500)
world.cemeteries = Cemeteries()

selector = InfectionSelector.from_file()
interaction = Interaction.from_file()
print(interaction.beta)

# modify interactions (example: scale the household beta by 2 and halve alpha_physical)
interaction.beta['household'] *= 2
print(interaction.beta)
print(interaction.alpha_physical)
interaction.alpha_physical /= 2
print(interaction.alpha_physical)

# Seed the disease
# There are two options implemented in the seed at the moment: either you specify the
# number of cases, which are then distributed homogeneously by population across the
# different areas, or you use UK data on cases per region. For now use the first option.
seed = InfectionSeed(
    world.super_areas,
    selector,  # assumed second argument, mirroring the Seed(world.super_areas, selector) construction above
)
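# Continuation sketch (assumption): with the first seeding option, a number of cases is
# chosen and released into the world, mirroring the seed.unleash_virus(n_cases) call in
# the script above. The 1% fraction is illustrative, and the exact InfectionSeed API is
# assumed to match that of Seed here.
n_cases = int(len(world.people) / 100)
seed.unleash_virus(n_cases)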
def generate_domain_from_hdf5(
    domain_id, super_areas_to_domain_dict: dict, file_path: str, chunk_size=500000
) -> "Domain":
    """
    Loads one domain of the world from an hdf5 file. All id references are substituted
    by actual references to the relevant instances.

    Parameters
    ----------
    domain_id
        id of the domain to load
    super_areas_to_domain_dict
        dictionary mapping each super area id to the domain it belongs to
    file_path
        path of the hdf5 file
    chunk_size
        how many units of supergroups to process at a time. It is advised to keep it
        around 1e6
    """
    logger.info(f"loading domain {domain_id} from HDF5")
    # import here to avoid recursive imports
    from june.domain import Domain

    # get the super area ids of this domain
    super_area_ids = set()
    for super_area, did in super_areas_to_domain_dict.items():
        if did == domain_id:
            super_area_ids.add(super_area)
    domain = Domain()
    # get keys in hdf5 file
    with h5py.File(file_path, "r", libver="latest", swmr=True) as f:
        f_keys = list(f.keys()).copy()
    geography = load_geography_from_hdf5(
        file_path=file_path, chunk_size=chunk_size, domain_super_areas=super_area_ids
    )
    domain.areas = geography.areas
    area_ids = set([area.id for area in domain.areas])
    domain.super_areas = geography.super_areas
    domain.regions = geography.regions
    # load world data
    if "hospitals" in f_keys:
        logger.info("loading hospitals...")
        domain.hospitals = load_hospitals_from_hdf5(
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
            super_areas_to_domain_dict=super_areas_to_domain_dict,
        )
    if "schools" in f_keys:
        logger.info("loading schools...")
        domain.schools = load_schools_from_hdf5(
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
        )
    if "companies" in f_keys:
        domain.companies = load_companies_from_hdf5(
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
        )
    if "care_homes" in f_keys:
        logger.info("loading care homes...")
        domain.care_homes = load_care_homes_from_hdf5(
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
        )
    if "universities" in f_keys:
        logger.info("loading universities...")
        domain.universities = load_universities_from_hdf5(
            file_path=file_path,
            chunk_size=chunk_size,
            domain_areas=area_ids,
        )
    if "cities" in f_keys:
        logger.info("loading cities...")
        domain.cities = load_cities_from_hdf5(
            file_path=file_path,
            domain_super_areas=super_area_ids,
            super_areas_to_domain_dict=super_areas_to_domain_dict,
        )
    if "stations" in f_keys:
        logger.info("loading stations...")
        (
            domain.stations,
            domain.inter_city_transports,
            domain.city_transports,
        ) = load_stations_from_hdf5(
            file_path,
            domain_super_areas=super_area_ids,
            super_areas_to_domain_dict=super_areas_to_domain_dict,
        )
    if "households" in f_keys:
        domain.households = load_households_from_hdf5(
            file_path, chunk_size=chunk_size, domain_super_areas=super_area_ids
        )
    if "population" in f_keys:
        domain.people = load_population_from_hdf5(
            file_path, chunk_size=chunk_size, domain_super_areas=super_area_ids
        )
    if "social_venues" in f_keys:
        logger.info("loading social venues...")
        social_venues_dict = load_social_venues_from_hdf5(
            file_path, domain_areas=area_ids
        )
        for social_venues_spec, social_venues in social_venues_dict.items():
            setattr(domain, social_venues_spec, social_venues)
    # restore world
    logger.info("restoring world...")
    restore_geography_properties_from_hdf5(
        world=domain,
        file_path=file_path,
        chunk_size=chunk_size,
        domain_super_areas=super_area_ids,
        super_areas_to_domain_dict=super_areas_to_domain_dict,
    )
    if "population" in f_keys:
        restore_population_properties_from_hdf5(
            world=domain,
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
            super_areas_to_domain_dict=super_areas_to_domain_dict,
        )
    if "households" in f_keys:
        restore_households_properties_from_hdf5(
            world=domain,
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
        )
    if "care_homes" in f_keys:
        logger.info("restoring care homes...")
        restore_care_homes_properties_from_hdf5(
            world=domain,
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
        )
    if "hospitals" in f_keys:
        logger.info("restoring hospitals...")
        restore_hospital_properties_from_hdf5(
            world=domain,
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
            domain_areas=area_ids,
            super_areas_to_domain_dict=super_areas_to_domain_dict,
        )
    if "companies" in f_keys:
        logger.info("restoring companies...")
        restore_companies_properties_from_hdf5(
            world=domain,
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
        )
    if "schools" in f_keys:
        logger.info("restoring schools...")
        restore_school_properties_from_hdf5(
            world=domain,
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
        )
    if "universities" in f_keys:
        logger.info("restoring unis...")
        restore_universities_properties_from_hdf5(
            world=domain, file_path=file_path, domain_areas=area_ids
        )
    if "cities" in f_keys and "stations" in f_keys:
        logger.info("restoring commute...")
        restore_cities_and_stations_properties_from_hdf5(
            world=domain,
            file_path=file_path,
            chunk_size=chunk_size,
            domain_super_areas=super_area_ids,
            super_areas_to_domain_dict=super_areas_to_domain_dict,
        )
    if "social_venues" in f_keys:
        logger.info("restoring social venues...")
        restore_social_venues_properties_from_hdf5(
            world=domain, file_path=file_path, domain_areas=area_ids
        )
    domain.cemeteries = Cemeteries()
    return domain
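# Usage sketch (assumption): each process loads only its own domain of a previously
# saved world. The super-area ids and the mapping that sends them all to domain 0 are
# illustrative; in practice super_areas_to_domain_dict comes from a domain-decomposition
# step over the super areas stored in the HDF5 file.
super_area_ids_in_file = [0, 1, 2, 3]  # illustrative ids, normally read from the file
super_areas_to_domain_dict = {sa_id: 0 for sa_id in super_area_ids_in_file}
domain = generate_domain_from_hdf5(
    domain_id=0,
    super_areas_to_domain_dict=super_areas_to_domain_dict,
    file_path="world.hdf5",
    chunk_size=500000,
)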
def generate_world_from_hdf5(file_path: str, chunk_size=500000) -> World:
    """
    Loads the world from an hdf5 file. All id references are substituted by actual
    references to the relevant instances.

    Parameters
    ----------
    file_path
        path of the hdf5 file
    chunk_size
        how many units of supergroups to process at a time. It is advised to keep it
        around 1e6
    """
    logger.info("loading world from HDF5")
    world = World()
    with h5py.File(file_path, "r", libver="latest", swmr=True) as f:
        f_keys = list(f.keys()).copy()
    geography = load_geography_from_hdf5(file_path=file_path, chunk_size=chunk_size)
    world.areas = geography.areas
    world.super_areas = geography.super_areas
    world.regions = geography.regions
    if "hospitals" in f_keys:
        logger.info("loading hospitals...")
        world.hospitals = load_hospitals_from_hdf5(
            file_path=file_path, chunk_size=chunk_size
        )
    if "schools" in f_keys:
        logger.info("loading schools...")
        world.schools = load_schools_from_hdf5(
            file_path=file_path, chunk_size=chunk_size
        )
    if "companies" in f_keys:
        world.companies = load_companies_from_hdf5(
            file_path=file_path, chunk_size=chunk_size
        )
    if "care_homes" in f_keys:
        logger.info("loading care homes...")
        world.care_homes = load_care_homes_from_hdf5(
            file_path=file_path, chunk_size=chunk_size
        )
    if "universities" in f_keys:
        logger.info("loading universities...")
        world.universities = load_universities_from_hdf5(
            file_path=file_path, chunk_size=chunk_size
        )
    if "cities" in f_keys:
        logger.info("loading cities...")
        world.cities = load_cities_from_hdf5(file_path)
    if "stations" in f_keys:
        logger.info("loading stations...")
        (
            world.stations,
            world.inter_city_transports,
            world.city_transports,
        ) = load_stations_from_hdf5(file_path)
    if "households" in f_keys:
        world.households = load_households_from_hdf5(file_path, chunk_size=chunk_size)
    if "population" in f_keys:
        world.people = load_population_from_hdf5(file_path, chunk_size=chunk_size)
    if "social_venues" in f_keys:
        logger.info("loading social venues...")
        social_venues_dict = load_social_venues_from_hdf5(file_path)
        for social_venues_spec, social_venues in social_venues_dict.items():
            setattr(world, social_venues_spec, social_venues)
    # restore world
    logger.info("restoring world...")
    restore_geography_properties_from_hdf5(
        world=world, file_path=file_path, chunk_size=chunk_size
    )
    if "population" in f_keys:
        restore_population_properties_from_hdf5(
            world=world, file_path=file_path, chunk_size=chunk_size
        )
    if "households" in f_keys:
        restore_households_properties_from_hdf5(
            world=world, file_path=file_path, chunk_size=chunk_size
        )
    if "care_homes" in f_keys:
        logger.info("restoring care homes...")
        restore_care_homes_properties_from_hdf5(
            world=world, file_path=file_path, chunk_size=chunk_size
        )
    if "hospitals" in f_keys:
        logger.info("restoring hospitals...")
        restore_hospital_properties_from_hdf5(
            world=world, file_path=file_path, chunk_size=chunk_size
        )
    if "cities" in f_keys and "stations" in f_keys:
        logger.info("restoring commute...")
        restore_cities_and_stations_properties_from_hdf5(
            world=world, file_path=file_path, chunk_size=chunk_size
        )
    if "companies" in f_keys:
        logger.info("restoring companies...")
        restore_companies_properties_from_hdf5(
            world=world, file_path=file_path, chunk_size=chunk_size
        )
    if "schools" in f_keys:
        logger.info("restoring schools...")
        restore_school_properties_from_hdf5(
            world=world, file_path=file_path, chunk_size=chunk_size
        )
    if "universities" in f_keys:
        logger.info("restoring unis...")
        restore_universities_properties_from_hdf5(world=world, file_path=file_path)
    if "social_venues" in f_keys:
        logger.info("restoring social venues...")
        restore_social_venues_properties_from_hdf5(world=world, file_path=file_path)
    world.cemeteries = Cemeteries()
    return world
def make_dummy_world(geog):
    super_area = geog.super_areas.members[0]
    company = Company(super_area=super_area, n_workers_max=100, sector="Q")

    household1 = Household()
    household1.area = super_area.areas[0]
    hospital = Hospital(
        n_beds=40,
        n_icu_beds=5,
        super_area=super_area.name,
        coordinates=super_area.coordinates,
    )
    uni = University(
        coordinates=super_area.coordinates,
        n_students_max=2500,
    )

    worker1 = Person.from_attributes(age=44, sex='f', ethnicity='A1', socioecon_index=5)
    worker1.area = super_area.areas[0]
    household1.add(worker1, subgroup_type=household1.SubgroupType.adults)
    worker1.sector = "Q"
    company.add(worker1)

    worker2 = Person.from_attributes(age=42, sex='m', ethnicity='B1', socioecon_index=5)
    worker2.area = super_area.areas[0]
    household1.add(worker2, subgroup_type=household1.SubgroupType.adults)
    worker2.sector = "Q"
    company.add(worker2)

    student1 = Person.from_attributes(age=20, sex='f', ethnicity='A1', socioecon_index=5)
    student1.area = super_area.areas[0]
    household1.add(student1, subgroup_type=household1.SubgroupType.adults)
    uni.add(student1)

    pupil1 = Person.from_attributes(age=8, sex='m', ethnicity='C1', socioecon_index=5)
    pupil1.area = super_area.areas[0]
    household1.add(pupil1, subgroup_type=household1.SubgroupType.kids)
    # school.add(pupil1)

    pupil2 = Person.from_attributes(age=5, sex='f', ethnicity='A1', socioecon_index=5)
    pupil2.area = super_area.areas[0]
    household1.add(pupil2, subgroup_type=household1.SubgroupType.kids)
    # school.add(pupil2)

    world = World()
    world.schools = Schools([])
    world.hospitals = Hospitals([hospital])
    world.households = Households([household1])
    world.universities = Universities([uni])
    world.companies = Companies([company])
    world.people = Population([worker1, worker2, student1, pupil1, pupil2])
    world.super_areas = geog.super_areas
    world.areas = geog.areas
    world.cemeteries = Cemeteries()

    cinema = Cinema()
    cinema.coordinates = super_area.coordinates
    world.cinemas = Cinemas([cinema])
    pub = Pub()
    pub.coordinates = super_area.coordinates
    world.pubs = Pubs([pub])
    world.areas[0].people = world.people
    return world
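# Usage sketch (assumption): the fixture expects a geography with at least one super
# area, so a single-MSOA geography like those built in the scripts above would do. The
# MSOA code is taken from the msoaslist above and is illustrative here.
geog = Geography.from_file({"msoa": ["E02005702"]})
dummy_world = make_dummy_world(geog)
print("Dummy world length", len(dummy_world.people))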