def gen():
    for rootname in rootnames:
        yield Job(rootname, argv, verbose=verbose,
                  mapkwargs=mapkwargs, return_fig=False)

def overdisp(ms, overwaves, overtypes, overmodes, overfreqs, verbose=True, **mapkwargs): """extrapolate dispersion curves""" herrmanncaller = HerrmannCallerBasis(waves=overwaves, types=overtypes, modes=overmodes, freqs=overfreqs, h=0.005, ddc=0.005) fun = _OverdispCore(herrmanncaller) gen = (Job(mms) for mms in ms) with MapSync(fun, gen, **mapkwargs) as ma: if verbose: wb = waitbar('overdisp') Njobs = len(ms) - 1. for jobid, (mms, overvalues), _, _ in ma: if verbose: wb.refresh(jobid / Njobs) dds = (overwaves, overtypes, overmodes, overfreqs, overvalues) yield mms, dds if verbose: wb.close() print() print()
def overdisp(ms, overwaves, overtypes, overmodes, overfreqs, verbose=True, **mapkwargs): """extrapolate dispersion curves""" def fun(mms): ztop, vp, vs, rh = mms try: overvalues = dispersion(ztop, vp, vs, rh, overwaves, overtypes, overmodes, overfreqs, h=0.005, dcl=0.005, dcr=0.005) except KeyboardInterrupt: raise except Exception as e: h = ztop[1:] - ztop[:-1] # assume failuer was caused by rounding issues h[h <= 0.001] = 0.001001 ztop = np.concatenate(([0.], h.cumsum())) try: #again overvalues = dispersion(ztop, vp, vs, rh, overwaves, overtypes, overmodes, overfreqs, h=0.005, dcl=0.005, dcr=0.005) except KeyboardInterrupt: raise except Exception as giveup: overvalues = np.nan * np.ones(len(overwaves)) return mms, overvalues with MapSync(fun, (Job(mms) for mms in ms), **mapkwargs) as ma: if verbose: wb = waitbar('overdisp') Njobs = len(ms) - 1. for jobid, (mms, overvalues), _, _ in ma: if verbose: wb.refresh(jobid / Njobs) dds = (overwaves, overtypes, overmodes, overfreqs, overvalues) yield mms, dds if verbose: wb.close() print print
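# Usage sketch (not part of the original source, added for illustration): overdisp is a
# generator, so the extrapolated curves are pulled lazily. Variable names below are
# illustrative; ms is assumed to be an iterable of (ztop, vp, vs, rh) depth models, as
# expected by the two variants above.
for (ztop, vp, vs, rh), (waves, types, modes, freqs, values) in \
        overdisp(ms, overwaves, overtypes, overmodes, overfreqs, verbose=False):
    # values holds one extrapolated dispersion value per (wave, type, mode, freq) row,
    # or NaNs if the forward problem failed twice for this model
    pass
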
def gen():
    for rootname in rootnames:
        yield Job(rootname, extract_mode, extract_limit,
                  extract_llkmin, extract_step, verbose,
                  percentiles=[.16, .5, .84],
                  mapkwargs=mapkwargs)

def gen(rootnames, runmode):
    for rootname in rootnames:
        targetfile = "%s/_HerrMet.target" % rootname
        paramfile = "%s/_HerrMet.param" % rootname
        runfile = "%s/_HerrMet.run" % rootname

        if runmode == "append" and not os.path.exists(runfile):
            runmode = "restart"
        elif runmode == "restart" and os.path.exists(runfile):
            os.remove(runfile)
        elif runmode == "skip" and os.path.exists(runfile):
            print("skip %s" % rootname)
            continue

        # ------
        p, logRHOM = load_paramfile(paramfile)
        # ------
        d = makedatacoder(targetfile, which=Datacoder_log)  # datacoder based on observations
        dobs, CDinv = d.target()
        duncs = CDinv ** -.5
        ND = len(dobs)
        dinfs = d(0.1 * np.ones_like(d.values))
        dsups = d(3.5 * np.ones_like(d.values))
        logRHOD = LogGaussND(dobs, duncs, dinfs, dsups, k=1000., nanbehavior=1)
        # ------
        G = Theory(parameterizer=p, datacoder=d)
        # ---------------------------------
        if runmode == "restart" or runmode == "skip":
            with RunFile(runfile, create=True, verbose=verbose) as rundb:
                rundb.drop()
                rundb.reset(p.NLAYER, d.waves, d.types, d.modes, d.freqs)
        elif runmode == "append":
            pass
        else:
            raise Exception('unexpected runmode %s' % runmode)
        # ---------------------------------
        for chainid in range(Nchain):
            M0 = np.random.rand(len(p.MINF)) * (p.MSUP - p.MINF) + p.MINF
            MSTD = p.MSTD
            yield Job(runfile=runfile, rootname=rootname, chainid=chainid,
                      M0=M0, MSTD=MSTD, G=G, ND=ND,
                      logRHOD=logRHOD, logRHOM=logRHOM,
                      p=p, d=d, nkeep=Nkeep, verbose=verbose)

def gen(rootnames):
    for rootname in rootnames:
        targetfile = "%s/_HerrMet.target" % rootname
        paramfile = "%s/_HerrMet.param" % rootname
        runfile = "%s/_HerrMet.run" % rootname

        # ------
        p, logRHOM = load_paramfile(paramfile)
        # ------
        d = makedatacoder(targetfile, which=Datacoder_log)  # datacoder based on observations
        dobs, CDinv = d.target()
        duncs = CDinv ** -.5
        ND = len(dobs)
        dinfs = d(0.1 * np.ones_like(d.values))
        dsups = d(3.5 * np.ones_like(d.values))
        logRHOD = LogGaussND(dobs, duncs, dinfs, dsups, k=1000., nanbehavior=1)
        # ------
        G = Theory(parameterizer=p, datacoder=d)
        # ---------------------------------
        with RunFile(runfile, verbose=verbose) as rundb:
            best = list(rundb.get(llkmin=top_llkmin, limit=top_limit,
                                  step=top_step, algo=None))
        # ---------------------------------
        for modelid, chainid, weight, llk, nlayer, model, dat in best:
            M0 = p(*model)
            DM = 1.0  # p.MSTD
            yield Job(runfile=runfile, rootname=rootname, chainid=chainid,
                      M0=M0, DM=DM, G=G, ND=ND,
                      logRHOD=logRHOD, logRHOM=logRHOM,
                      p=p, d=d, verbose=verbose)

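# Consumption sketch (assumption, not from the original source): the two gen(rootnames)
# generators above feed a worker pool the same way overdisp feeds MapSync. The worker
# name run_one and its body are placeholders; its signature mirrors the keyword
# arguments packed into each Job by the second generator.
def run_one(runfile, rootname, chainid, M0, DM, G, ND,
            logRHOD, logRHOM, p, d, verbose):
    # ... invert starting from M0 and store the results in runfile ...
    return rootname, chainid

with MapSync(run_one, gen(rootnames), **mapkwargs) as ma:
    for jobid, (rootname, chainid), _, _ in ma:
        pass  # collect or report per-node results here
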
def gen():
    for rootname in rootnames:
        yield Job(rootname, argv, verbose=verbose, mapkwargs=mapkwargs)

def job_generator():
    for iy in range(ny):
        for jx in range(nx):  # order matters!!!!
            vs = Mprior[:, iy, jx]
            yield Job(iy, jx, ztop, vs)

def JobGen():
    for weight, dm in zip(weights, dms):
        yield Job(weight, dm)

def gen():
    for i in range(1, 4 * len(ztop)):
        modeli = model0.copy()
        modeli[i] += dmodel[i]
        yield Job(i, modeli)

def job_generator():
    for iy in range(ny):
        for jx in range(nx):  # order matters!!!!
            nnode = iy * nx + jx
            yield Job(nnode, theorys[iy, jx], M[:, iy, jx])

def job_generator():
    for i in range(self.shape[0]):
        yield Job(i)

def job_generator():
    ls = zip(parameterizer_strings, datacoder_strings)
    for nnode, (ps, ds) in enumerate(ls):
        yield Job(nnode, parameterizer_string=ps, datacoder_string=ds)

def job_generator():
    for i in range(len(b)):
        yield Job(i)

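# Sketch (assumption, added for illustration): the index-only generators above rely on
# the worker closing over shared arrays; the placeholder below shows the pattern with
# the same MapSync consumer used in the snippets above.
def worker(i):
    return i, b[i]  # b is the sequence referenced by the last generator

with MapSync(worker, job_generator(), **mapkwargs) as ma:
    for jobid, (i, value), _, _ in ma:
        pass  # gather the per-index results here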