def test_parameters():
    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    params.set_disease("ncov")
    params.set_input_files("2011Data")
    params.add_seeds("ExtraSeedsBrighton.dat")

    # extra parameters that are set
    params.UV = 1.0
    params.static_play_at_home = 0
    params.play_to_work = 0
    params.work_to_play = 0
    params.daily_imports = 0.0

    variables = params.read_variables(ncovparams_csv, 0)
    params = params.set_variables(variables[0])

    # make sure that we can correctly pickle and unpickle these parameters
    data = pickle.dumps(params)
    print(f"Pickled params to {data}")
    params2 = pickle.loads(data)

    assert params == params2
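
# ---------------------------------------------------------------------------
# A minimal convenience sketch (not part of the original test): since the
# pickle round-trip above is asserted to be lossless, a Parameters object can
# be persisted to disk in the same way. The file name is illustrative only.
# ---------------------------------------------------------------------------

def _save_params(params, filename="params.pkl"):
    """Write a pickled copy of `params` to `filename`."""
    with open(filename, "wb") as f:
        pickle.dump(params, f)


def _load_params(filename="params.pkl"):
    """Read back a Parameters object written by `_save_params`."""
    with open(filename, "rb") as f:
        return pickle.load(f)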
def test_go_partial(prompt=None, nthreads=1):
    import random
    seed = random.randint(100000, 1000000)

    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease(os.path.join(script_dir, "data", "ncov.json"))
    params.set_input_files("2011Data")
    params.add_seeds("ExtraSeedsBrighton.dat")

    # the size of the starting population
    population = Population(initial=57104043)

    nsteps = 20

    demographics = Demographics.load(redblue_json)

    print("Building the network...")
    network = Network.build(params=params)

    network = network.specialise(demographics, nthreads=nthreads)

    outdir = os.path.join(script_dir, "test_go_to")

    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        trajectory = network.copy().run(population=population, seed=seed,
                                        output_dir=output_dir, nsteps=nsteps,
                                        mixer=mix_evenly, mover=move_partial,
                                        nthreads=nthreads)

    OutputFiles.remove(outdir, prompt=None)

    # 70% of the population (roughly) should be in "blue"
    pop = trajectory[-1]
    print(pop)

    frac0 = pop.subpops[0].population / pop.population
    frac1 = pop.subpops[1].population / pop.population

    print(frac0, frac1)

    # pytest.approx error is relative, e.g. 0.05 is within 5%
    assert pytest.approx(frac0, 0.1) == 0.3
    assert pytest.approx(frac1, 0.05) == 0.7
    assert pytest.approx(frac0 + frac1, 0.01) == 1.0
def test_move_isolate(prompt=None, nthreads=1):
    # user input parameters
    import random
    seed = random.randint(100000, 1000000)

    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease(os.path.join(script_dir, "data", "ncov.json"))
    params.set_input_files("2011Data")
    params.add_seeds("ExtraSeedsBrighton.dat")

    # the size of the starting population
    population = Population(initial=57104043)

    nsteps = 20

    demographics = Demographics.load(isolate_json)

    print("Building the network...")
    network = Network.build(params=params)

    network = network.specialise(demographics, nthreads=nthreads)

    outdir = os.path.join(script_dir, "test_zero_demographic_output")

    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        trajectory = network.copy().run(population=population, seed=seed,
                                        output_dir=output_dir, nsteps=nsteps,
                                        profiler=None, mixer=mix_isolate,
                                        mover=move_isolate,
                                        nthreads=nthreads)

    OutputFiles.remove(outdir, prompt=None)

    # there should be no latents in the 'isolate' demographic
    assert trajectory[-1].subpops[1].latent == 0

    print("End of the run")
def test_go_stage(prompt=None, nthreads=1):
    seed = 797747

    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease(os.path.join(script_dir, "data", "ncov.json"))
    params.set_input_files("2011Data")
    params.add_seeds("ExtraSeedsBrighton.dat")

    # the size of the starting population
    population = Population(initial=57104043)

    nsteps = 20

    demographics = Demographics.load(redblue_json)

    print("Building the network...")
    network = Network.build(params=params)

    network = network.specialise(demographics, nthreads=nthreads)

    outdir = os.path.join(script_dir, "test_go_to")

    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        trajectory = network.copy().run(population=population, seed=seed,
                                        output_dir=output_dir, nsteps=nsteps,
                                        mixer=mix_evenly, mover=move_stage,
                                        nthreads=nthreads)

    OutputFiles.remove(outdir, prompt=None)

    # red demographic was seeded, but all moved to blue, so should have
    # no outbreak
    pop = trajectory[-1]
    print(pop)
def test_adjustable():
    params = Parameters.load()
    params.set_disease("lurgy")

    variables = VariableSet()

    variables["user.something[5]"] = 0.5
    variables["user.something[2]"] = 0.3
    variables["user.another[1]"] = 0.8
    variables["user.flag"] = True   # this will be converted to 1.0
    variables["beta[2]"] = 0.2
    variables["too_ill_to_move[1]"] = 0.15
    variables["progress[0]"] = 0.99
    variables["contrib_foi[4]"] = 0.45
    variables["length_day"] = 0.75
    variables["UV"] = 0.4

    with pytest.raises(KeyError):
        variables["broken"] = 0.9

    with pytest.raises(KeyError):
        variables["Beta[2]"] = 0.8

    variables.adjust(params)

    print(params)
    print(params.disease_params)
    print(params.user_params)

    assert variables in params.adjustments
    print(params.adjustments)

    assert params.user_params["something"][5] == 0.5
    assert params.user_params["something"][2] == 0.3
    assert params.user_params["another"][1] == 0.8
    assert params.user_params["flag"] == 1.0
    assert params.disease_params.beta[2] == 0.2
    assert params.disease_params.too_ill_to_move[1] == 0.15
    assert params.disease_params.progress[0] == 0.99
    assert params.disease_params.contrib_foi[4] == 0.45
    assert params.length_day == 0.75
    assert params.UV == 0.4
def _get_network(nthreads):
    global _network

    if _network is not None:
        return _network.copy()

    inputfile = ncovparams_csv
    line_num = 0
    UV = 0.0

    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease(os.path.join(script_dir, "data", "ncov.json"))
    params.set_input_files("2011Data")
    params.add_seeds("ExtraSeedsBrighton.dat")

    # start from the parameters in the specified line number of the
    # provided input file
    variables = params.read_variables(inputfile, line_num)

    # extra parameters that are set
    params.UV = UV
    params.static_play_at_home = 0
    params.play_to_work = 0
    params.work_to_play = 0
    params.daily_imports = 0.0

    demographics = Demographics.load(redgreenblue_json)

    print("Building the network...")
    network = Network.build(params=params)

    network = network.specialise(demographics, nthreads=nthreads)

    _network = network

    return network
def test_output():
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease("ncov")
    params.set_input_files("2011Data")
    params.add_seeds("ExtraSeedsBrighton.dat")

    # the size of the starting population
    population = Population(initial=57104043)

    print("Building the network...")
    network = Network.build(params=params)

    from metawards.iterators import build_custom_iterator
    iterator = build_custom_iterator(iterate_debug, __name__)

    print("Run the model...")
    outdir = os.path.join(script_dir, "test_output")

    with OutputFiles(outdir, force_empty=True, prompt=None) as output_dir:
        with Console.redirect_output(outdir, auto_bzip=True):
            trajectory = network.run(population=population,
                                     output_dir=output_dir,
                                     iterator=iterator, nthreads=1)

    OutputFiles.remove(outdir, prompt=None)

    print(trajectory[-1])

    print("End of the run")
def test_variable_pathway():
    demographics_json = os.path.join(script_dir, "data", "red_one_blue.json")
    variables_csv = os.path.join(script_dir, "data", "demographic_scan.csv")

    demographics = Demographics.load(demographics_json)
    variables = VariableSets.read(variables_csv)

    print(variables[0].fingerprint(include_index=True))

    params = Parameters.load()
    params.set_disease("lurgy")
    params.set_input_files("single")

    network = Network.build(params)
    network = network.specialise(demographics)

    params = network.params.set_variables(variables[0])

    assert params.disease_params.beta == [0.0, 0.0, 0.1, 0.2, 0.0]
    assert params["overall"].disease_params.beta == \
        [0.0, 0.0, 0.1, 0.2, 0.0]
    assert params["red one"].disease_params.beta == \
        [0.0, 0.0, 0.1, 0.5, 0.27]

    print(params["blue"].disease_params.beta)
    assert params["blue"].disease_params.beta == \
        [0.0, 0.0, 0.1, 0.2, 0.25, 0.0]

    network.update(params)

    d = network.params.disease_params
    print(d.beta)
    assert d.beta == [0.0, 0.0, 0.1, 0.2, 0.0]

    d = network.subnets[0].params.disease_params
    print(d.beta)
    assert d.beta == [0.0, 0.0, 0.1, 0.5, 0.27]

    d = network.subnets[1].params.disease_params
    print(d.beta)
    assert d.beta == [0.0, 0.0, 0.1, 0.2, 0.25, 0.0]
def test_duplicated_harmonise():
    params = Parameters.load()
    params.set_disease("ncov")

    demographics = Demographics.load(multinetwork_json)

    network = demographics.build(params=params)

    assert len(network.subnets) == 5

    main = Wards.from_json(main_json)
    students = Wards.from_json(students_json)
    teachers = Wards.from_json(teachers_json)

    assert network.overall.population == main.population() + \
        students.population() + teachers.population()

    assert network.subnets[0].population == main.population()
    assert network.subnets[1].population == int(0.7 * teachers.population())
    assert network.subnets[2].population == int(0.7 * students.population())
    assert network.subnets[3].population == int(0.3 * teachers.population())
    assert network.subnets[4].population == int(0.3 * students.population())
def test_integration_pox(prompt=None):
    """This test repeats main_RepeatsNcov.c and validates that the
       various stages report the same results as the original C code
       for the pox disease
    """
    # user input parameters
    seed = 15324
    inputfile = ncovparams_csv
    line_num = 0
    UV = 1.0

    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease("pox")
    params.set_input_files("2011Data")
    params.add_seeds("ExtraSeedsBrighton.dat")

    # start from the parameters in the specified line number of the
    # provided input file
    variables = params.read_variables(inputfile, line_num)

    # extra parameters that are set
    params.UV = UV
    params.static_play_at_home = 0
    params.play_to_work = 0
    params.work_to_play = 0
    params.daily_imports = 0.0

    # the size of the starting population
    population = Population(initial=57104043)

    profiler = Profiler()

    print("Building the network...")
    network = Network.build(params=params, profiler=profiler)

    params = params.set_variables(variables[0])
    network.update(params, profiler=profiler)

    outdir = os.path.join(script_dir, "test_integration_output")

    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        print("Run the model...")
        trajectory = network.run(population=population, seed=seed,
                                 output_dir=output_dir, nsteps=31,
                                 profiler=profiler, nthreads=1)

    OutputFiles.remove(outdir, prompt=None)

    print("End of the run")
    Console.print_profiler(profiler)

    Console.rule("Model output")
    Console.print_population(trajectory[-1])

    # The original C code has this expected population after 31 steps
    expected = Population(initial=57104043,
                          susceptibles=56080780,
                          latent=374,
                          total=370,
                          recovereds=553,
                          n_inf_wards=289,
                          day=31)

    Console.rule("Expected output")
    Console.print_population(expected)

    assert trajectory[-1] == expected
def test_iterator():
    """This test repeats main_RepeatsNcov.c and validates that the
       various stages report the same results as the original C code
       for ncov, when using a custom iterator that just calls
       iterate_weekday
    """
    prompt = None

    # user input parameters
    seed = 15324
    inputfile = ncovparams_csv
    line_num = 0
    UV = 1.0

    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease("ncov")
    params.set_input_files("2011Data")
    params.add_seeds("ExtraSeedsBrighton.dat")

    # start from the parameters in the specified line number of the
    # provided input file
    variables = params.read_variables(inputfile, line_num)

    # extra parameters that are set
    params.UV = UV
    params.static_play_at_home = 0
    params.play_to_work = 0
    params.work_to_play = 0
    params.daily_imports = 0.0

    # the size of the starting population
    population = Population(initial=57104043)

    profiler = Profiler()

    print("Building the network...")
    network = Network.build(params=params, profiler=profiler)

    params = params.set_variables(variables[0])
    network.update(params, profiler=profiler)

    # Here is a custom iterator function that just calls
    # iterate_weekday after 'print_hello'
    def print_hello(**kwargs):
        print("Hello")

    def my_iterator(**kwargs):
        from metawards.iterators import iterate_weekday
        return [print_hello] + iterate_weekday(**kwargs)

    from metawards.iterators import build_custom_iterator
    iterator = build_custom_iterator(my_iterator, __name__)

    print("Run the model...")
    outdir = os.path.join(script_dir, "test_integrator_output")

    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        trajectory = network.run(population=population, seed=seed,
                                 output_dir=output_dir, nsteps=29,
                                 profiler=profiler, iterator=iterator,
                                 nthreads=1)

    OutputFiles.remove(outdir, prompt=None)

    print("End of the run")

    print(f"Model output: {trajectory}")
    print(profiler)

    # The original C code has this expected population after 29 steps
    expected = Population(initial=57104043,
                          susceptibles=56081710,
                          latent=145,
                          total=48,
                          recovereds=174,
                          n_inf_wards=39,
                          day=29)

    print(f"Expected output: {expected}")

    assert trajectory[-1] == expected
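
# ---------------------------------------------------------------------------
# Hedged sketch (not in the original tests): another custom iterator in the
# same style as `my_iterator` above. It assumes the same **kwargs protocol,
# i.e. that advance functions receive the current `population` by keyword,
# as exercised by build_custom_iterator in test_iterator.
# ---------------------------------------------------------------------------

def _report_day(population, **kwargs):
    """Print the current model day before the normal weekday advance."""
    print(f"Day {population.day}")


def _day_reporting_iterator(**kwargs):
    from metawards.iterators import iterate_weekday
    return [_report_day] + iterate_weekday(**kwargs)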
def cli():
    """Main function for the command line interface. This does one of
       three things:

       1. If this is the main process, then it parses the arguments
          and runs and manages the jobs

       2. If this is a worker process, then it starts up and waits for work

       3. If this is a supervisor process, then it queries the job
          scheduling system for information about the compute nodes to
          use, and will then set up and run a manager (main) process
          that will use those nodes to run the jobs
    """
    from metawards.utils import Console

    # get the parallel scheme now before we import any other modules
    # so that it is clear if mpi4py or scoop (or another parallel module)
    # has been imported via the required "-m module" syntax
    parallel_scheme = get_parallel_scheme()

    if parallel_scheme == "mpi4py":
        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        nprocs = comm.Get_size()
        rank = comm.Get_rank()

        if rank != 0:
            # this is a worker process, so should not do anything
            # more until it is given work in the pool
            Console.print(f"Starting worker process {rank+1} of "
                          f"{nprocs-1}...")
            return
        else:
            Console.print("Starting main process...")

    elif parallel_scheme == "scoop":
        Console.print("STARTING SCOOP PROCESS")

    import sys

    args, parser = parse_args()

    if not args.already_supervised:
        hostfile = get_hostfile(args)

        if hostfile:
            # The user has asked to run a parallel job - this means that
            # this process is the parallel supervisor
            if args.mpi:
                mpi_supervisor(hostfile, args)
                return
            elif args.scoop:
                scoop_supervisor(hostfile, args)
                return

            # neither is preferred - if scoop is installed then use that
            try:
                import scoop    # noqa - disable unused warning
                have_scoop = True
            except Exception:
                have_scoop = False

            if have_scoop:
                scoop_supervisor(hostfile, args)
                return

            # do we have MPI?
            try:
                import mpi4py   # noqa - disable unused warning
                have_mpi4py = True
            except Exception:
                have_mpi4py = False

            if have_mpi4py:
                mpi_supervisor(hostfile, args)
                return

            # we don't have any other option, just keep going and
            # use multiprocessing - in this case we don't need a
            # supervisor and this is the main process

    # This is now the code for the main process

    # we need at least one of these listed options to be set before
    # we will run anything
    should_run = False

    for arg in [args.input, args.repeats, args.disease, args.additional,
                args.model, args.iterator, args.extractor,
                args.demographics, args.mixer, args.mover]:
        if arg is not None:
            should_run = True
            break

    if not should_run:
        parser.print_help(sys.stdout)
        sys.exit(0)

    if args.repeats is None:
        args.repeats = [1]

    # import the parameters here to speed up the display of help
    from metawards import Parameters, Network, Population, \
        print_version_string

    # print the version information first, so that there is enough
    # information to enable someone to reproduce this run
    print_version_string()

    Console.rule("Initialise")

    if args.input:
        # get the line numbers of the input file to read
        if args.line is None or len(args.line) == 0:
            linenums = None
            Console.print(f"* Using parameters from all lines of "
                          f"{args.input}", markdown=True)
        else:
            from metawards.utils import string_to_ints
            linenums = string_to_ints(args.line)

            if len(linenums) == 0:
                Console.error(f"Cannot read zero lines from {args.input}?")
                sys.exit(-1)
            elif len(linenums) == 1:
                Console.print(f"* Using parameters from line {linenums[0]} "
                              f"of {args.input}", markdown=True)
            else:
                Console.print(f"* Using parameters from lines {linenums} "
                              f"of {args.input}", markdown=True)

        from metawards import VariableSets, VariableSet
        variables = VariableSets.read(filename=args.input,
                                      line_numbers=linenums)
    else:
        from metawards import VariableSets, VariableSet
        # create a VariableSets with one null VariableSet
        variables = VariableSets()
        variables.append(VariableSet())

    nrepeats = args.repeats

    if nrepeats is None or len(nrepeats) < 1:
        nrepeats = [1]

    if len(nrepeats) > 1 and len(variables) != len(nrepeats):
        Console.error(f"The number of repeats {len(nrepeats)} must equal "
                      f"the number of adjustable variable lines "
                      f"{len(variables)}")
        raise ValueError("Disagreement in the number of repeats and "
                         "adjustable variables")

    # ensure that all repeats are >= 0
    nrepeats = [0 if int(x) < 0 else int(x) for x in nrepeats]

    if sum(nrepeats) == 0:
        Console.error("The total number of repeats is 0. Are you sure "
                      "that you don't want to run anything?")
        raise ValueError("Cannot run nothing")

    if len(nrepeats) == 1 and nrepeats[0] == 1:
        Console.print("* Performing a single run of each set of parameters",
                      markdown=True)
    elif len(nrepeats) == 1:
        Console.print(f"* Performing {nrepeats[0]} runs of each set of "
                      f"parameters", markdown=True)
    else:
        Console.print(f"* Performing {nrepeats} runs applied to the "
                      f"parameters", markdown=True)

    variables = variables.repeat(nrepeats)

    # work out the number of processes and threads...
    from metawards.utils import guess_num_threads_and_procs
    (nthreads, nprocs) = guess_num_threads_and_procs(
        njobs=len(variables), nthreads=args.nthreads, nprocs=args.nprocs,
        parallel_scheme=parallel_scheme)

    Console.print(f"\n* Number of threads to use for each model run is "
                  f"{nthreads}", markdown=True)

    if nprocs > 1:
        Console.print(f"* Number of processes used to parallelise model "
                      f"runs is {nprocs}", markdown=True)
        Console.print(f"* Parallelisation will be achieved using "
                      f"{parallel_scheme}", markdown=True)

    # sort out the random number seed
    seed = args.seed

    if seed is None:
        import random
        seed = random.randint(10000, 99999999)

    if seed == 0:
        # this is a special mode that a developer can use to force
        # all jobs to use the same random number seed (15324) that
        # is used for comparing outputs. This should NEVER be used
        # for production code
        Console.warning("Using special mode to fix all random number "
                        "seeds to 15324. DO NOT USE IN PRODUCTION!!!")
    else:
        Console.print(f"* Using random number seed {seed}", markdown=True)

    # get the starting day and date
    start_day = args.start_day

    if start_day < 0:
        raise ValueError(f"You cannot use a start day {start_day} that is "
                         f"less than zero!")

    start_date = None

    if args.start_date:
        try:
            from dateparser import parse
            start_date = parse(args.start_date).date()
        except Exception:
            pass

        if start_date is None:
            from datetime import date
            try:
                start_date = date.fromisoformat(args.start_date)
            except Exception as e:
                raise ValueError(f"Cannot interpret a valid date from "
                                 f"'{args.start_date}'. Error is "
                                 f"{e.__class__} {e}")

    if start_date is None:
        from datetime import date
        start_date = date.today()

    Console.print(f"* Day zero is {start_date.strftime('%A %B %d %Y')}",
                  markdown=True)

    if start_day != 0:
        from datetime import timedelta
        start_day_date = start_date + timedelta(days=start_day)
        Console.print(f"Starting on day {start_day}, which is "
                      f"{start_day_date.strftime('%A %B %d %Y')}")
    else:
        start_day_date = start_date

    # now find the MetaWardsData repository as this will be needed
    # for the repeat command line too
    (repository, repository_version) = \
        Parameters.get_repository(args.repository)

    Console.print(f"* Using MetaWardsData at {repository}", markdown=True)

    if repository_version["is_dirty"]:
        Console.warning("This repository is dirty, meaning that the data "
                        "has not been committed to git. This may make "
                        "this calculation very difficult to reproduce")

    # now work out the minimum command line needed to repeat this job
    args.seed = seed
    args.nprocs = nprocs
    args.nthreads = nthreads
    args.start_date = start_date.isoformat()
    args.repository = repository

    # also print the source of all inputs
    import configargparse
    Console.rule("Source of inputs")
    p = configargparse.get_argument_parser("main")
    Console.print(p.format_values())

    # print out the command used to repeat this job
    repeat_cmd = "metawards"

    for key, value in vars(args).items():
        if value is not None:
            k = key.replace("_", "-")

            if isinstance(value, bool):
                if value:
                    repeat_cmd += f" --{k}"
            elif isinstance(value, list):
                repeat_cmd += f" --{k}"
                for val in value:
                    v = str(val)
                    if " " in v:
                        repeat_cmd += f" '{v}'"
                    else:
                        repeat_cmd += f" {v}"
            else:
                v = str(value)
                if " " in v:
                    repeat_cmd += f" --{k} '{v}'"
                else:
                    repeat_cmd += f" --{k} {v}"

    Console.rule("Repeating this run")
    Console.print("To repeat this job use the command;")
    Console.command(repeat_cmd)

    Console.print("Or alternatively use the config.yaml file that will be "
                  "written to the output directory and use the command;")
    Console.command("metawards -c config.yaml")

    # load all of the parameters
    try:
        params = Parameters.load(parameters=args.parameters)
    except Exception as e:
        Console.warning(f"Unable to load parameter files. Make sure that "
                        f"you have cloned the MetaWardsData repository and "
                        f"have set the environment variable METAWARDSDATA "
                        f"to point to the local directory containing the "
                        f"repository, e.g. the default is "
                        f"$HOME/GitHub/MetaWardsData")
        raise e

    # should we profile the code? (default no as it prints a lot)
    profiler = None

    if args.no_profile:
        profiler = None
    elif args.profile:
        from metawards.utils import Profiler
        profiler = Profiler()

    # load the disease and starting-point input files
    Console.rule("Disease")
    if args.disease:
        params.set_disease(args.disease)
    else:
        params.set_disease("ncov")

    Console.rule("Model data")
    if args.model:
        params.set_input_files(args.model)
    else:
        params.set_input_files("2011Data")

    # load the user-defined custom parameters
    Console.rule("Custom parameters and seeds")
    if args.user_variables:
        custom = VariableSet.read(args.user_variables)
        Console.print(f"Adjusting variables to {custom}")
        custom.adjust(params)
    else:
        Console.print("Not adjusting any parameters...")

    # read the additional seeds
    if args.additional is None or len(args.additional) == 0:
        Console.print("Not using any additional seeds...")
    else:
        for additional in args.additional:
            Console.print(f"Loading additional seeds from {additional}")
            params.add_seeds(additional)

    # what to do with the 0 state?
    stage_0 = "R"

    if args.disable_star:
        Console.print("Disabling the * state. Stage 0 is the one and "
                      "only E state.")
        stage_0 = "disable"
    elif args.star_is_E:
        Console.print("Setting the * state as an additional E state.")
        stage_0 = "E"
    else:
        Console.print("Setting the * state as an additional R state.")
        stage_0 = "R"

    params.stage_0 = stage_0

    # extra parameters that are set
    params.UV = args.UV

    # set these extra parameters to 0
    params.static_play_at_home = 0
    params.play_to_work = 0
    params.work_to_play = 0
    params.daily_imports = 0.0

    Console.rule("Parameters")
    Console.print(params, markdown=True)

    # the size of the starting population
    population = Population(initial=args.population,
                            date=start_day_date,
                            day=start_day)

    Console.rule("Building the network")
    network = Network.build(params=params, population=population,
                            max_nodes=args.max_nodes,
                            max_links=args.max_links,
                            profiler=profiler)

    if args.demographics:
        from metawards import Demographics
        Console.rule("Specialising into demographics")
        demographics = Demographics.load(args.demographics)
        Console.print(demographics)

        network = network.specialise(demographics,
                                     profiler=profiler,
                                     nthreads=nthreads)

    Console.rule("Preparing to run")
    from metawards import OutputFiles
    from metawards.utils import run_models

    outdir = args.output

    if outdir is None:
        outdir = "output"

    if args.force_overwrite_output:
        prompt = None
    else:
        from metawards import input

        def prompt(x):
            return input(x, default="y")

    auto_bzip = True

    if args.auto_bzip:
        auto_bzip = True
    elif args.no_auto_bzip:
        auto_bzip = False

    if args.iterator:
        iterator = args.iterator
    else:
        iterator = None

    if args.extractor:
        extractor = args.extractor
    else:
        extractor = None

    if args.mixer:
        mixer = args.mixer
    else:
        mixer = None

    if args.mover:
        mover = args.mover
    else:
        mover = None

    with OutputFiles(outdir, force_empty=args.force_overwrite_output,
                     auto_bzip=auto_bzip, prompt=prompt) as output_dir:
        # write the config file for this job to output/config.yaml
        Console.rule("Running the model")
        CONSOLE = output_dir.open("console.log")
        Console.save(CONSOLE)

        lines = []
        max_keysize = None

        for key, value in vars(args).items():
            if max_keysize is None:
                max_keysize = len(key)
            elif len(key) > max_keysize:
                max_keysize = len(key)

        for key, value in vars(args).items():
            if value is not None:
                key = key.replace("_", "-")
                spaces = " " * (max_keysize - len(key))

                if isinstance(value, bool):
                    if value:
                        lines.append(f"{key}:{spaces} true")
                    else:
                        lines.append(f"{key}:{spaces} false")
                elif isinstance(value, list):
                    s_value = [str(x) for x in value]
                    lines.append(f"{key}:{spaces} [ {', '.join(s_value)} ]")
                else:
                    lines.append(f"{key}:{spaces} {value}")

        CONFIG = output_dir.open("config.yaml", auto_bzip=False)
        lines.sort(key=str.swapcase)
        CONFIG.write("\n".join(lines))
        CONFIG.write("\n")
        CONFIG.flush()
        CONFIG.close()
        lines = None

        result = run_models(network=network, variables=variables,
                            population=population, nprocs=nprocs,
                            nthreads=nthreads, seed=seed,
                            nsteps=args.nsteps, output_dir=output_dir,
                            iterator=iterator, extractor=extractor,
                            mixer=mixer, mover=mover, profiler=profiler,
                            parallel_scheme=parallel_scheme)

        if result is None or len(result) == 0:
            Console.print("No output - end of run")
            return 0

        Console.rule("End of the run", style="finish")

        Console.save(CONSOLE)

    return 0
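
# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): driving the cli()
# entry point above through the installed `metawards` command. The long-form
# flag names mirror those reconstructed in repeat_cmd inside cli(); the
# values and the exact option spellings are illustrative assumptions only.
# ---------------------------------------------------------------------------

def _example_invocation():
    import subprocess
    subprocess.run(["metawards",
                    "--disease", "ncov",
                    "--model", "2011Data",
                    "--nsteps", "20",
                    "--seed", "15324"],
                   check=True)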
def test_pathway():
    demographics = Demographics.load(demographics_json)
    assert len(demographics) == 2

    disease_home = Disease.load(filename=home_json)
    disease_super = Disease.load(filename=super_json)

    assert demographics[1].disease is None
    assert demographics[0].disease == disease_super

    params = Parameters.load()
    params.set_disease(disease_home)
    params.set_input_files("single")
    params.add_seeds("ExtraSeedsOne.dat")

    network = Network.build(params)

    print(network.params.disease_params)
    print(disease_home)

    assert network.params.disease_params == disease_home

    network = network.specialise(demographics)

    print(network.params.disease_params)
    print(disease_home)

    assert network.params.disease_params == disease_home

    print(network.subnets[1].params.disease_params)
    print(disease_home)

    assert network.subnets[1].params.disease_params == disease_home

    print(network.subnets[0].params.disease_params)
    print(disease_super)

    assert network.subnets[0].params.disease_params == disease_super

    infections = network.initialise_infections()

    assert infections.N_INF_CLASSES == disease_home.N_INF_CLASSES()
    assert \
        infections.subinfs[1].N_INF_CLASSES == disease_home.N_INF_CLASSES()
    assert \
        infections.subinfs[0].N_INF_CLASSES == disease_super.N_INF_CLASSES()

    assert disease_super.N_INF_CLASSES() != disease_home.N_INF_CLASSES()

    outdir = os.path.join(script_dir, "test_pathway")

    with OutputFiles(outdir, force_empty=True, prompt=None) as output_dir:
        results = network.copy().run(population=Population(),
                                     output_dir=output_dir,
                                     mixer=mix_evenly,
                                     nthreads=1,
                                     seed=36538943)
        # using one thread, but if we use 2 then we get a system crash after
        # any other test that uses the big network. This is because we
        # have initialised some global data that assumes a large network,
        # which then fails for the small network

    OutputFiles.remove(outdir, prompt=None)

    print(results[-1])
    print(results[-1].initial)

    expected = Population(susceptibles=519,
                          latent=0,
                          total=0,
                          recovereds=481,
                          n_inf_wards=0,
                          day=90)

    print(expected)

    assert results[-1].has_equal_SEIR(expected)
    assert results[-1].day == expected.day

    with OutputFiles(outdir, force_empty=True, prompt=None) as output_dir:
        results = network.copy().run(population=Population(),
                                     output_dir=output_dir,
                                     mixer=mix_evenly,
                                     nthreads=1,
                                     seed=36538943)

    OutputFiles.remove(outdir, prompt=None)

    print(results[-1])
    print(results[-1].initial)
    print(expected)

    assert results[-1].has_equal_SEIR(expected)
    assert results[-1].day == expected.day

    variables = VariableSet()

    print("\nUpdate with null variables")
    oldparams = network.params
    params = network.params.set_variables(variables)
    network.update(params)

    assert oldparams == network.params

    print(network.params.disease_params)
    print(disease_home)

    assert network.params.disease_params == disease_home

    print(network.subnets[1].params.disease_params)
    print(disease_home)

    assert network.subnets[1].params.disease_params == disease_home

    print(network.subnets[0].params.disease_params)
    print(disease_super)

    assert network.subnets[0].params.disease_params == disease_super

    infections = network.initialise_infections()

    assert infections.N_INF_CLASSES == disease_home.N_INF_CLASSES()
    assert \
        infections.subinfs[1].N_INF_CLASSES == disease_home.N_INF_CLASSES()
    assert \
        infections.subinfs[0].N_INF_CLASSES == disease_super.N_INF_CLASSES()

    assert disease_super.N_INF_CLASSES() != disease_home.N_INF_CLASSES()

    outdir = os.path.join(script_dir, "test_pathway")

    with OutputFiles(outdir, force_empty=True, prompt=None) as output_dir:
        results = network.copy().run(population=Population(),
                                     output_dir=output_dir,
                                     mixer=mix_evenly,
                                     nthreads=1,
                                     seed=36538943)

    OutputFiles.remove(outdir, prompt=None)

    print(results[-1])
    print(expected)

    assert results[-1].has_equal_SEIR(expected)
    assert results[-1].day == expected.day
def cli():
    import sys
    import argparse
    from metawards import Parameters, Network, Population

    parser = argparse.ArgumentParser(
        description="MetaWards epidemic modelling - see "
                    "https://github.com/chryswoods/metawards "
                    "for more information",
        prog="metawards")

    parser.add_argument('-i', '--input', type=str,
                        help="Input file for the simulation")
    parser.add_argument('-l', '--line', type=int, default=0,
                        help="Line number from the inputfile to run "
                             "(default 0 as 0-indexed line number)")
    parser.add_argument('-s', '--seed', type=int, default=None,
                        help="Random number seed for this run "
                             "(default is to use a random seed)")
    parser.add_argument('-u', '--UV', type=float, default=1.0,
                        help="Value for the UV parameter for the model "
                             "(default is 1.0)")
    parser.add_argument('-d', '--disease', type=str, default="ncov",
                        help="Name of the disease to model "
                             "(default is 'ncov')")
    parser.add_argument('-I', '--input-data', type=str, default="2011Data",
                        help="Name of the input data set for the network "
                             "(default is '2011Data')")
    parser.add_argument('-p', '--parameters', type=str, default="march29",
                        help="Name of the input parameter set used to "
                             "control the simulation (default 'march29')")
    parser.add_argument('-r', '--repository', type=str, default=None,
                        help="Path to the MetaWardsData repository. If "
                             "unspecified this defaults to the value "
                             "in the environment variable METAWARDSDATA "
                             "or, if that isn't specified, to "
                             "$HOME/GitHub/MetaWardsData")
    parser.add_argument('-P', '--population', type=int, default=57104043,
                        help="Initial population (default 57104043)")
    parser.add_argument('-n', '--nsteps', type=int, default=None,
                        help="Maximum number of steps to run for the "
                             "simulation (default is to run until the "
                             "epidemic has finished)")
    parser.add_argument('-o', '--output', type=str, default="output",
                        help="Path to the directory in which to place all "
                             "output files (default 'output')")

    args = parser.parse_args()

    print(args)

    if args.input is None:
        parser.print_help(sys.stdout)
        sys.exit(0)

    # load all of the parameters
    try:
        params = Parameters.load(parameters=args.parameters)
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease(args.disease)
    params.set_input_files(args.input_data)

    # start from the parameters in the specified line number of the
    # provided input file
    params.read_file(args.input, args.line)

    # extra parameters that are set
    params.UV = args.UV

    # set these extra parameters to 0
    params.static_play_at_home = 0
    params.play_to_work = 0
    params.work_to_play = 0
    params.daily_imports = 0.0

    # the size of the starting population
    population = Population(initial=args.population)

    print("Building the network...")
    network = Network.build(params=params, calculate_distances=True)

    print("Run the model...")
    population = network.run(population=population, seed=args.seed,
                             s=-1, nsteps=args.nsteps,
                             output_dir=args.output)

    print("End of the run")

    print(f"Model output: {population}")
def test_network_copy(prompt=None, nthreads=1):
    # user input parameters
    import random
    seed = random.randint(100000, 1000000)
    UV = 0.0

    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease(os.path.join(script_dir, "data", "ncov.json"))
    params.set_input_files("2011Data")
    params.add_seeds("ExtraSeedsBrighton.dat")

    # extra parameters that are set
    params.UV = UV
    params.static_play_at_home = 0
    params.play_to_work = 0
    params.work_to_play = 0
    params.daily_imports = 0.0

    # the size of the starting population
    population = Population(initial=57104043)

    nsteps = 20

    print("Building the network...")
    network = Network.build(params=params)

    outdir = os.path.join(script_dir, "test_network_copy")

    print("Run 1")
    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        t1 = network.copy().run(population=population, seed=seed,
                                output_dir=output_dir, nsteps=nsteps,
                                extractor=extract_none, nthreads=nthreads)

    OutputFiles.remove(outdir, prompt=None)

    print("Run 2")
    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        t2 = network.copy().run(population=population, seed=seed,
                                output_dir=output_dir, nsteps=nsteps,
                                extractor=extract_none, nthreads=nthreads)

    print("Run 3")
    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        t3 = network.copy().run(population=population, seed=seed,
                                output_dir=output_dir, nsteps=nsteps,
                                extractor=extract_none, nthreads=nthreads)

    OutputFiles.remove(outdir, prompt=None)

    print(t1)
    print(t2)
    print(t3)

    assert t1 == t2
    assert t1 == t3
def test_demographics_reset(prompt=None, nthreads=1, force_multi=False):
    """This test runs several runs one after another with the expectation
       that they should all give the same result. This tests that the
       network is being correctly reset after each run. This test uses
       a mixer and demographics to show that these can be reset
    """
    # user input parameters
    import random
    seed = random.randint(100000, 1000000)
    inputfile = ncovparams_csv
    line_num = 0
    UV = 0.0

    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease(os.path.join(script_dir, "data", "ncov.json"))
    params.set_input_files("2011Data")
    params.add_seeds("ExtraSeedsBrighton.dat")

    # start from the parameters in the specified line number of the
    # provided input file
    variables = params.read_variables(inputfile, line_num)

    # extra parameters that are set
    params.UV = UV
    params.static_play_at_home = 0
    params.play_to_work = 0
    params.work_to_play = 0
    params.daily_imports = 0.0

    # the size of the starting population
    population = Population(initial=57104043)

    profiler = Profiler()

    nsteps = 20

    demographics = Demographics.load(redblue_json)

    print("Building the network...")
    network = Network.build(params=params, profiler=profiler)

    network = network.specialise(demographics, nthreads=2,
                                 profiler=profiler)

    outdir = os.path.join(script_dir, "test_integration_output")

    if can_run_multiprocessing(force_multi):
        print("Running parallel...")
        variable = variables[0]
        variables = VariableSets()
        variables.append(variable)
        variables = variables.repeat(3)

        params = params.set_variables(variables[0])
        network.update(params, profiler=profiler)

        with OutputFiles(outdir, force_empty=True,
                         prompt=prompt) as output_dir:
            results = run_models(network=network, mixer=mix_shield,
                                 output_dir=output_dir, variables=variables,
                                 population=population, nsteps=nsteps,
                                 nthreads=nthreads, nprocs=2, seed=seed,
                                 debug_seeds=True)

        OutputFiles.remove(outdir, prompt=None)

        assert len(results) == 3

        print(f"Result 1\n{results[0][1][-1]}")
        print(f"Result 2\n{results[1][1][-1]}")
        print(f"Result 3\n{results[2][1][-1]}")

        assert results[0][1] == results[1][1]
        assert results[0][1] == results[2][1]

    print("Running model 1...")
    network.update(params, profiler=profiler)

    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        trajectory1 = network.run(population=population, seed=seed,
                                  output_dir=output_dir, nsteps=nsteps,
                                  profiler=None, mixer=mix_shield,
                                  nthreads=nthreads)

    OutputFiles.remove(outdir, prompt=None)

    # this should reset the network
    print("Running model 2...")
    network.update(params, profiler=profiler)

    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        trajectory2 = network.run(population=population, seed=seed,
                                  output_dir=output_dir, nsteps=nsteps,
                                  profiler=None, mixer=mix_shield,
                                  nthreads=nthreads)

    OutputFiles.remove(outdir, prompt=None)

    # this should reset the network
    print("Running model 3...")
    network.update(params, profiler=profiler)

    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        trajectory3 = network.run(population=population, seed=seed,
                                  output_dir=output_dir, nsteps=nsteps,
                                  profiler=None, mixer=mix_shield,
                                  nthreads=nthreads)

    OutputFiles.remove(outdir, prompt=None)

    print("End of the run")
    print(profiler)

    print(f"Model 1 output: {trajectory1}")
    print(f"Model 2 output: {trajectory2}")
    print(f"Model 3 output: {trajectory3}")

    assert trajectory1 == trajectory2
    assert trajectory1 == trajectory3

    if can_run_multiprocessing(force_multi):
        # this should also be the same result as the multiprocessing run
        assert trajectory1 == results[0][1]
def test_local():
    prompt = None

    # user input parameters
    seed = 15324
    UV = 1.0

    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease("ncov")
    params.set_input_files("2011Data")
    params.add_seeds("1 5 2124")   # seeding 5 into ward 2124

    # extra parameters that are set
    params.UV = UV
    params.static_play_at_home = 0
    params.play_to_work = 0
    params.work_to_play = 0
    params.daily_imports = 0.0

    # the size of the starting population
    population = Population(initial=57104043)

    print("Building the network...")
    network = Network.build(params=params)

    outdir = os.path.join(script_dir, "test_local_output")

    # First check that setting the local cutoff has the same effect
    # as setting the global cutoff
    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        trajectory = network.copy().run(population=population, seed=seed,
                                        output_dir=output_dir, nsteps=50,
                                        iterator=iterate_cutoff,
                                        extractor=extract_cutoff,
                                        nthreads=1)

    OutputFiles.remove(outdir, prompt=None)

    # run setting the global dyn_dist_cutoff to zero
    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        net2 = network.copy()
        net2.params.dyn_dist_cutoff = 0.0
        trajectory2 = net2.run(population=population, seed=seed,
                               output_dir=output_dir, nsteps=50,
                               extractor=extract_cutoff, nthreads=1)

    OutputFiles.remove(outdir, prompt=None)

    print(f"Model output: {trajectory}")
    print(f"Model output: {trajectory2}")

    assert trajectory == trajectory2

    # now test that setting the local scale_uv has the same effect as
    # setting the global scale_uv
    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        trajectory = network.copy().run(population=population, seed=seed,
                                        output_dir=output_dir, nsteps=50,
                                        iterator=iterate_scale,
                                        extractor=extract_scale,
                                        nthreads=1)

    OutputFiles.remove(outdir, prompt=None)

    # run setting the global scale_uv to zero
    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        net2 = network.copy()
        pop2 = deepcopy(population)
        pop2.scale_uv = 0.0
        trajectory2 = net2.run(population=pop2, seed=seed,
                               output_dir=output_dir, nsteps=50,
                               extractor=extract_scale, nthreads=1)

    OutputFiles.remove(outdir, prompt=None)

    print(f"Model output: {trajectory}")
    print(f"Model output: {trajectory2}")

    p = trajectory[-1]
    p2 = trajectory2[-1]

    # won't be identical as a different scale_uv causes a different order
    # of random numbers - but should still have 0 infections
    assert p.has_equal_SEIR(p2)

    # now test that setting both to non-zero values has the same effect
    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        trajectory = network.copy().run(population=population, seed=seed,
                                        output_dir=output_dir, nsteps=50,
                                        iterator=iterate_both, nthreads=1)

    OutputFiles.remove(outdir, prompt=None)

    # run setting the global dyn_dist_cutoff and scale_uv directly
    with OutputFiles(outdir, force_empty=True, prompt=prompt) as output_dir:
        net2 = network.copy()
        net2.params.dyn_dist_cutoff = 42.0
        pop2 = deepcopy(population)
        pop2.scale_uv = 0.5
        trajectory2 = net2.run(population=pop2, seed=seed,
                               output_dir=output_dir, nsteps=50,
                               nthreads=1)

    OutputFiles.remove(outdir, prompt=None)

    print(f"Model output: {trajectory}")
    print(f"Model output: {trajectory2}")

    p = trajectory[-1]
    p2 = trajectory2[-1]

    p2.scale_uv = 1

    assert p == p2
def test_integration():
    """This test repeats main_RepeatsNcov.c and validates that the
       various stages report the same results as the original C code
    """
    # user input parameters
    seed = 15324
    inputfile = ncovparams_csv
    line_num = 0
    UV = 1.0

    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    # load the disease and starting-point input files
    params.set_disease("ncov")
    params.set_input_files("2011Data")

    # start from the parameters in the specified line number of the
    # provided input file
    params.read_file(inputfile, line_num)

    # extra parameters that are set
    params.UV = UV
    params.static_play_at_home = 0
    params.play_to_work = 0
    params.work_to_play = 0
    params.daily_imports = 0.0

    # the size of the starting population
    population = Population(initial=57104043)

    print("Building the network...")
    network = Network.build(params=params, calculate_distances=True)

    print("Run the model...")
    population = network.run(population=population, seed=seed,
                             s=-1, nsteps=20)

    print("End of the run")

    print(f"Model output: {population}")

    # The original C code has this expected population after 20 steps
    expected = Population(initial=57104043,
                          susceptibles=56081923,
                          latent=61,
                          total=17,
                          recovereds=76,
                          n_inf_wards=24)

    print(f"Expected output: {expected}")

    assert population == expected
def test_ward_conversion():
    # load all of the parameters
    try:
        params = Parameters.load(parameters="march29")
    except Exception as e:
        print(f"Unable to load parameter files. Make sure that you have "
              f"cloned the MetaWardsData repository and have set the "
              f"environment variable METAWARDSDATA to point to the "
              f"local directory containing the repository, e.g. the "
              f"default is $HOME/GitHub/MetaWardsData")
        raise e

    params.set_input_files("2011Data")

    print("Building the network...")
    network = Network.build(params=params)

    profiler = Profiler()

    profiler = profiler.start("to_json")

    wards = network.to_wards(profiler=profiler)

    print(f"{wards.num_workers()} / {wards.num_players()}")

    _assert_equal(wards.num_workers(), network.work_population)
    _assert_equal(wards.num_players(), network.play_population)

    print(f"{wards.num_work_links()} / {wards.num_play_links()}")

    _assert_equal(wards.num_work_links(), network.nlinks)
    _assert_equal(wards.num_play_links(), network.nplay)

    print("Converting to data...")
    data = wards.to_data(profiler=profiler)

    print("Converting to json...")
    profiler = profiler.start("Convert to JSON")
    s = json.dumps(data)
    profiler = profiler.stop()

    profiler = profiler.stop()   # end to_json

    print(f"Done - {len(s)/(1024*1024.0)} MB : {s[0:1024]}...")

    print("Converting from json...")
    profiler = profiler.start("Convert from JSON")
    profiler = profiler.start("from_json")
    data = json.loads(s)
    profiler = profiler.stop()

    wards2 = Wards.from_data(data, profiler=profiler)

    assert wards2 == wards

    network2 = Network.from_wards(wards2, profiler=profiler)
    profiler = profiler.stop()

    Console.print(profiler)

    Console.print("Validating equality - may take some time...")

    _assert_equal(network2.nnodes, network.nnodes)
    _assert_equal(network2.nlinks, network.nlinks)
    _assert_equal(network2.nplay, network.nplay)

    if network.info is None:
        assert network2.info is None

    _assert_equal(len(network.info), len(network2.info))

    Console.print(f"{len(network.info)}, {network.nnodes}")

    with Console.progress() as progress:
        task1 = progress.add_task("Validating info", total=len(network.info))
        task2 = progress.add_task("Validating nodes", total=network.nnodes)
        task3 = progress.add_task("Validating work", total=network.nlinks)
        task4 = progress.add_task("Validating play", total=network.nplay)

        for i in range(0, len(network.info)):
            assert network.info[i] == network2.info[i]
            progress.update(task1, advance=1)

        progress.update(task1, completed=len(network.info),
                        force_update=True)

        for i in range(1, network.nnodes + 1):
            _assert_equal(network.nodes.label[i], network2.nodes.label[i])
            _assert_equal(network.nodes.begin_to[i],
                          network2.nodes.begin_to[i])
            _assert_equal(network.nodes.end_to[i], network2.nodes.end_to[i])
            _assert_equal(network.nodes.self_w[i], network2.nodes.self_w[i])
            _assert_equal(network.nodes.begin_p[i],
                          network2.nodes.begin_p[i])
            _assert_equal(network.nodes.end_p[i], network2.nodes.end_p[i])
            _assert_equal(network.nodes.self_p[i], network2.nodes.self_p[i])
            _assert_equal(network.nodes.x[i], network2.nodes.x[i])
            _assert_equal(network.nodes.y[i], network2.nodes.y[i])
            progress.update(task2, advance=1)

        progress.update(task2, completed=network.nnodes, force_update=True)

        for i in range(1, network.nlinks + 1):
            _assert_equal(network.links.ifrom[i], network2.links.ifrom[i])
            _assert_equal(network.links.ito[i], network2.links.ito[i])
            _assert_equal(network.links.weight[i], network2.links.weight[i])
            _assert_equal(network.links.suscept[i],
                          network2.links.suscept[i])
            progress.update(task3, advance=1)

        progress.update(task3, completed=network.nlinks, force_update=True)

        for i in range(1, network.nplay + 1):
            _assert_equal(network.play.ifrom[i], network2.play.ifrom[i])
            _assert_equal(network.play.ito[i], network2.play.ito[i])
            _assert_equal(network.play.weight[i], network2.play.weight[i])
            _assert_equal(network.play.suscept[i], network2.play.suscept[i])
            progress.update(task4, advance=1)

        progress.update(task4, completed=network.nplay, force_update=True)
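
# ---------------------------------------------------------------------------
# Hedged convenience sketch built on the API exercised above (to_wards,
# Wards.to_data/from_data, Network.from_wards): round-trip a Network through
# a JSON file on disk. The file handling itself is illustrative only.
# ---------------------------------------------------------------------------

def _save_network_json(network, filename):
    """Serialise `network` to `filename` via its Wards representation."""
    with open(filename, "w") as f:
        json.dump(network.to_wards().to_data(), f)


def _load_network_json(filename):
    """Rebuild a Network from a JSON file written by `_save_network_json`."""
    with open(filename) as f:
        return Network.from_wards(Wards.from_data(json.load(f)))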