# NOTE(review): extraction fused this chunk onto one physical line (newlines
# lost), so it will not parse as-is; the code is left byte-identical below.
# Content: (1) the tail of a subprocess-polling helper -- indices of finished
# Popen handles are collected in `done`, then popped in reverse so earlier
# indices stay valid; returns True once `processes` is empty, False when the
# (not visible) outer loop times out.  (2) a CLI `__main__` section that sets
# up a DefaultArgumentParser for an H5 report/graphing tool.
# Style nit for when this is reflowed: `retcode!=None` should be spelled
# `retcode is not None` (identity test against the None singleton).
# The enclosing function's `def` line is outside this view -- reconstruct
# indentation from the original file, not from this fragment.
if retcode!=None: done.append(idx) logging.info("{0} completed {1}".format(todo[idx][4], retcode)) done.reverse() for d in done: processes.pop(d) todo.pop(d) if len(processes)==0: return True time.sleep(interval) return False if __name__ == "__main__": #logging.basicConfig(level=logging.INFO) parser=DefaultArgumentParser(description="Quick look at an H5 file") parser.add_argument("--file", dest="file", action="store", default="rider.h5", help="data file to read") parser.add_argument("--image", dest="image", action="store", default="image.h5", help="summary data from file") parser.add_function("report", "Build the report") parser.add_function("lines", "Write the image of all realizations") parser.add_function("binned", "Average daily prevalence") parser.add_function("multiples", "Small multiples graph of one realization") parser.add_function("summary", "Several summary graphs") parser.add_function("generate", "Create graphs and report") args=parser.parse_args() pyfile=os.path.abspath(__file__) filename=os.path.abspath(args.file)
# NOTE(review): newlines were lost in extraction; code left byte-identical.
# Content: (1) the tail of a parameter-sweep function (the `def` and the
# definitions of `res`, `total_individual`, `run_cnt`, `seed` are outside this
# view) that walks the SIR initial-condition grid with itertools.product and
# launches ./sirexp via subprocess.Popen for each cell, bumping the RNG seed
# per run; (2) a `__main__` section wiring up --res/--runcnt and dispatching
# to matrix_run / timing_run.
# NOTE(review): `np.int` is removed in NumPy >= 1.24 -- use plain `int` (or
# np.int64) when this is reflowed.
# NOTE(review): `ret.communicate()` blocks until the child exits, so despite
# Popen the runs are sequential -- presumably intentional; confirm.
for iri in it.product(range(1,res), range(1,res)): ir=np.array(total_individual*np.array(iri)/res, dtype=np.int) sir=[total_individual, ir[0], ir[1]] fname="arr-{0}-{1}-{2}.h5".format(sir[0]-(ir[0]+ir[1]) , sir[1], sir[2]) to_run=['./sirexp', '-j','4', '--runcnt', str(run_cnt), '-s', str(sum(sir)), '-i', str(sir[1]), '-r', str(sir[2]), '--seed', str(seed), '--endtime', str(0.2), '--loglevel', 'warning', '--beta1', str(0), '--datafile', fname] logger.debug(to_run) ret=subprocess.Popen(to_run) result=ret.communicate()[0] # join the process seed+=1 if __name__=='__main__': parser=DefaultArgumentParser(description="Run sirexp many times") parser.add_function("exp", "Explore initial conditions") parser.add_function("time", "Time how long it takes to run.") parser.add_argument("--res", dest="resolution", type=int, action="store", default=10, help="How finely to subdivide the SIR space") parser.add_argument("--runcnt", dest="runcnt", type=int, action="store", default=10, help="How many times to run each simulation") args=parser.parse_args() if args.exp: matrix_run(args.resolution, args.runcnt) if args.time: for individual_cnt in [5000, 10000, 20000, 50000, 100000, 200000, 500000, 1000000]: elapsed=timing_run(individual_cnt, args.runcnt) print("{0}\t{1}".format(individual_cnt, elapsed))
# NOTE(review): newlines were lost in extraction; code left byte-identical.
# Content: (1) the tail of a distribution-fitting diagnostic (the `def` and
# the definitions of `x`, `y`, `a`, `th`, `params`, `names`, `times_idx` are
# outside this view): logs the weighted mean of the data against the fitted
# gamma mean a*theta, logs gamma CDF mass in the bins [0,0.5], (0.5,1.5],
# (1.5,2.5], then plots either the survival histogram or the hazard
# (`toplot` is hard-coded to "hazard" here).  (2) a `__main__` section for a
# NAADSM/SC trace-to-HDF5 tool.
# NOTE(review): this chunk is truncated -- it ends at `if args.singlefarm:`
# with the branch body missing; the rest lives outside this view.
logger.info("Mean value of points {0}.".format(np.dot(x, y)/np.sum(y))) logger.info("Gamma mean {0}".format(a*th)) cdf_to_5=scipy.stats.gamma.cdf(0.5, a=a, scale=th) cdf_to_15=scipy.stats.gamma.cdf(1.5, a=a, scale=th) cdf_to_25=scipy.stats.gamma.cdf(2.5, a=a, scale=th) logger.info("Gamma {0}".format((cdf_to_5, cdf_to_15-cdf_to_5, cdf_to_25-cdf_to_15))) toplot="hazard" if toplot=="survival": plot_histogram(x, y, params[times_idx], names[times_idx]) elif toplot=="hazard": plot_hazard(x, y, params[times_idx], names[times_idx]) if __name__ == "__main__": parser=DefaultArgumentParser(description="Produces csv of total outbreak size") parser.add_argument("--input", dest="infile", action="store", default="naadsm.out", help="Input trace from NAADSM/SC") parser.add_argument("--output", dest="outfile", action="store", default="naadsm.h5", help="HDF5 file with events") parser.add_function("multiple", "Copy all events to output file") parser.add_function("showstates", "Take a look at state transitions") parser.add_function("singlefarm", "One farm over and over.") args=parser.parse_args() if args.multiple: initial=np.array([1]) allowed_transitions=read_multiple_naadsmsc(args.infile, args.outfile, initial) logger.info("allowed transitions are {0}.".format(allowed_transitions)) if args.singlefarm:
# NOTE(review): newlines were lost in extraction; code left byte-identical.
# Content: (1) the tail of a CSV writer (the enclosing `with open(...) as
# csvfile` and the definitions of `observed`/`censored` are outside this
# view): emits one row per sample as (trial, value, censored-flag), where
# `observed[oidx]`/`censored[oidx]` are counts of samples whose value is the
# bin index oidx -- flag 1 for observed, 0 for censored, with a single
# running 1-based trial id across both groups.  (2) a `__main__` section
# that counts farms/runs/days in an HDF5 ensemble and builds a Tracking
# accumulator over every dataset.
writer=csv.writer(csvfile, quoting=csv.QUOTE_MINIMAL) writer.writerow(["trial", "value", "censored"]) idx=1 for oidx in range(len(observed)): for i in range(observed[oidx]): writer.writerow([idx, oidx, 1]) idx+=1 for oidx in range(len(censored)): for i in range(censored[oidx]): writer.writerow([idx, oidx, 0]) idx+=1 if __name__ == "__main__": parser=DefaultArgumentParser(description="Finds residence time in states.") parser.add_argument("--input", dest="infile", action="store", default="naadsm.h5", help="Input HDF5 file with ensemble of events") parser.add_argument("--id", dest="ID", action="store", default="", help="Specify scenario ID label for output files") args=parser.parse_args() counts=BaseCounts() foreach_dataset(args.infile, counts) logger.info("Number of farms {0}.".format(counts.farm_cnt)) logger.info("Number of runs {0}.".format(counts.run_cnt)) logger.info("Largest number of days {0}.".format(counts.day_cnt)) tracking=Tracking(counts.farm_cnt, counts.run_cnt, counts.day_cnt) foreach_dataset(args.infile, tracking)
def save_h5(openh5, events):
    """Append one trajectory's events to an already-open HDF5 file.

    A new group /trajectory/dset<N> (N chosen by next_dset) is created and
    filled with four parallel datasets -- Event, Who, Whom, When -- holding
    one entry per event tuple.

    NOTE(review): each event tuple unpacks as (event, whom, who, day), yet
    the "Who" dataset is filled from the second element and "Whom" from the
    third; this crossing is preserved here exactly as in the original --
    confirm against the reader whether it is intentional.

    :param openh5: open, writable h5py File (or Group) object
    :param events: sequence of 4-tuples (event code, id, id, day)
    """
    count = len(events)
    slot = next_dset(openh5)
    group = openh5.create_group("/trajectory/dset{0}".format(slot))
    # Fixed-size integer datasets for the codes/ids, float for the time.
    event_ds = group.create_dataset("Event", (count,), dtype="i")
    who_ds = group.create_dataset("Who", (count,), dtype="i")
    whom_ds = group.create_dataset("Whom", (count,), dtype="i")
    when_ds = group.create_dataset("When", (count,), dtype=np.float64)
    for i, (ev_code, second, third, day) in enumerate(events):
        event_ds[i] = ev_code
        who_ds[i] = second   # original stored the 2nd tuple field under "Who"
        whom_ds[i] = third   # ...and the 3rd tuple field under "Whom"
        when_ds[i] = day


if __name__ == "__main__":
    # CLI entry point: convert a NAADSM trace into an HDF5 event file.
    cli = DefaultArgumentParser(description="Produces HDF5 event file from trace data")
    cli.add_argument("--input", dest="infile", action="store",
                     default="naadsm.out", help="Input trace from NAADSM")
    cli.add_argument("--output", dest="outfile", action="store",
                     default="naadsm.h5", help="HDF5 file with events")
    args = cli.parse_args()
    allowed_transitions = read_multiple_naadsmsc(args.infile, args.outfile)
    logger.info("allowed transitions are {0}.".format(allowed_transitions))
# NOTE(review): newlines were lost in extraction; code left byte-identical.
# Content: (1) eeid_long_behavior() -- prints the endemic-equilibrium
# fractions (S, I, R) of an SIR-with-demography model from hard-coded EEID
# example rates (B birth rate, beta transmission, mu death rate, gamma
# recovery), using S=(mu+gamma)B/(beta*mu), I=(beta-mu-gamma)B/(beta(mu+gamma)),
# R=gamma*I/mu as written here.  (2) a `__main__` section for an H5
# inspection tool (info/trajectory/dir/eeid subcommands).
# NOTE(review): in Python 3 the rate constants use true division as intended;
# under Python 2 `1/70` and `365/14` would truncate -- confirm target version.
# NOTE(review): this chunk is truncated -- it ends at `if args.trajectory:`
# with the branch body missing; the rest lives outside this view.
def eeid_long_behavior(): B=1/70 beta=400 mu=1/70 gamma=365/14 S=(mu+gamma)*B/(beta*mu) I=(beta-mu-gamma)*B/(beta*(mu+gamma)) R=gamma*I/mu print("EEID long time is\n\tS\t{0}\n\tI\t{1}\n\tR\t{2}".format(S, I, R)) if __name__ == "__main__": logging.basicConfig(level=logging.INFO) parser=DefaultArgumentParser(description="Quick look at an H5 file") parser.add_function("info", "Find what program made the file.") parser.add_function("trajectory", "Plot the trajectory") parser.add_function("dir", "List datasets") parser.add_function("eeid", "verify eeid example values") parser.add_argument("--file", dest="file", action="store", default="sirexp.h5", help="data file to read") args=parser.parse_args() filename=args.file f=h5py.File(filename, "r") if args.info: showproginfo(f) if args.trajectory:
def write_totals(filename, outfile):
    """Summarize per-trajectory outbreak sizes into a two-column CSV.

    Reads the ensemble totals via run_sizes(filename) and writes rows of
    (trial, outbreaksize) with 1-based trial numbers, overwriting outfile.

    :param filename: input HDF5 file holding the ensemble of events
    :param outfile: path of the CSV file to create
    """
    logger.info("Reading input {0}. Writing to {1}".format(filename, outfile))
    totals = run_sizes(filename)
    logger.info("Number of trajectories {0}, average size {1}".format(
        len(totals), np.average(totals)))
    logger.debug("Sizes are {0}".format(totals))
    # newline="" hands line-ending control to the csv module, as its docs
    # require -- otherwise text-mode translation yields blank rows on Windows.
    with open(outfile, "w", newline="") as csvfile:
        writer = csv.writer(csvfile, quoting=csv.QUOTE_MINIMAL)
        writer.writerow(["trial", "outbreaksize"])
        for trial, size in enumerate(totals, start=1):
            writer.writerow([trial, size])


if __name__ == '__main__':
    parser = DefaultArgumentParser(description="Produces csv of total outbreak size")
    parser.add_argument("--input", dest="infile", action="store",
                        default="run.h5", help="Input HDF5 file with ensemble of events")
    parser.add_argument("--output", dest="outfile", action="store",
                        default="sizesc.csv", help="CSV output with sizes")
    args = parser.parse_args()
    write_totals(args.infile, args.outfile)