def numpyArray(self):
    import tempfile
    import os
    from scipy.io import mmio
    from fipy.tools import numerix, parallelComm

    # Rank 0 creates a temporary Matrix Market file name and broadcasts it
    # so that every process writes to and reads from the same file.
    if parallelComm.procID == 0:
        (_, mtxName) = tempfile.mkstemp(suffix='.mtx')
    else:
        mtxName = None

    mtxName = parallelComm.bcast(mtxName)

    self.exportMmf(mtxName)

    parallelComm.Barrier()
    mtx = mmio.mmread(mtxName)
    parallelComm.Barrier()

    if parallelComm.procID == 0:
        os.remove(mtxName)

    coo = mtx.tocoo()
    trilinosMatrix = self.matrix
    numpyArray = numerix.zeros((trilinosMatrix.NumGlobalRows(),
                                trilinosMatrix.NumGlobalCols()), 'd')
    numpyArray[coo.row, coo.col] = coo.data
    return numpyArray
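The method above illustrates the collective pattern that recurs throughout these snippets: rank 0 creates a resource, `parallelComm.bcast` hands the result to every other process, and barriers keep the file operations ordered. A minimal, self-contained sketch of just that pattern (the `shared_name` variable is illustrative, not part of FiPy's API):

import tempfile

from fipy.tools import parallelComm

if parallelComm.procID == 0:
    # only one process should create the temporary file
    _, shared_name = tempfile.mkstemp(suffix='.dat')
else:
    shared_name = None

# after the broadcast, every rank holds the same path
shared_name = parallelComm.bcast(shared_name)

# synchronize before any rank uses the file
parallelComm.Barrier()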
def savePhi(elapsed):
    # Only rank 0 creates the datreant data file; the resulting path is
    # broadcast so that every process writes to the same file.
    if parallelComm.procID == 0:
        fname = data["t={}.tar.gz".format(elapsed)].make().abspath
    else:
        fname = None

    fname = parallelComm.bcast(fname)

    fp.tools.dump.write((phi,), filename=fname)
def savePhi(elapsed, data_dir):
    # Only rank 0 touches the checkpoint file; the path is broadcast so
    # that every process passes the same filename to dump.write().
    if parallelComm.procID == 0:
        fname = data_dir / "t={}.tar.gz".format(elapsed)
        fname.touch()
    else:
        fname = None

    fname_bcast = parallelComm.bcast(fname)

    fp.tools.dump.write((phi,), filename=fname_bcast)
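For context, a hedged usage sketch of this second variant, assuming `phi` and `fp` already exist in scope and that `data_dir` behaves like a `pathlib.Path` (the directory name and save time are illustrative):

from pathlib import Path

from fipy.tools import parallelComm

data_dir = Path("checkpoints")        # hypothetical output directory
if parallelComm.procID == 0:
    data_dir.mkdir(exist_ok=True)     # only rank 0 touches the filesystem
parallelComm.Barrier()

savePhi(elapsed=100.0, data_dir=data_dir)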
def _calcValue(self):
    from fipy.tools import parallelComm

    rnd = self.parallelRandom()

    if parallelComm.Nproc > 1:
        # Broadcast rank 0's random field and slice out the cells that
        # belong to this processor's overlapping partition.
        rnd = parallelComm.bcast(rnd, root=0)
        return rnd[self.mesh._globalOverlappingCellIDs]
    else:
        return rnd
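A hedged usage sketch, assuming this `_calcValue` belongs to a FiPy noise-variable subclass such as `GaussianNoiseVariable` (the mesh size and parameters are illustrative):

import fipy as fp

mesh = fp.Grid2D(nx=10, ny=10)
noise = fp.GaussianNoiseVariable(mesh=mesh, mean=0., variance=1.)

# with the broadcast in _calcValue, every rank sees a slice of the same
# global random field rather than independent per-rank random numbers
print(noise.value.shape)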
eta.constrain(1., where=YY == 0.)
eta.constrain(0., where=YY == 0.5)

eta.value = eta_fp(xx, yy, 0.)

eq = (fp.TransientTerm() == -4 * eta * (eta - 1) * (eta - 0.5)
      + fp.DiffusionTerm(coeff=kappa_fp) + eq_fp(xx, yy, elapsed))

start = time.time()

while elapsed.value <= totaltime:
    eta.updateOld()
    eq.solve(var=eta, dt=dt)
    elapsed.value = elapsed() + dt

end = time.time()

data.categories["solvetime"] = end - start

error = eta - eta_fp(xx, yy, elapsed - dt)
error.name = r"$\Delta\eta$"

# rank 0 creates the output file and broadcasts its path before dumping
if parallelComm.procID == 0:
    fname = data["eta.tar.gz"].make().abspath
else:
    fname = None

fname = parallelComm.bcast(fname)

fp.tools.dump.write((eta, error), filename=fname)
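The pair dumped at the end of the run can be loaded back later with the matching reader; a minimal sketch, assuming the same broadcast `fname` as above:

import fipy as fp

# fp.tools.dump.read returns the tuple that was passed to dump.write
eta, error = fp.tools.dump.read(filename=fname)
print(eta.name, error.name)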
    data = dtr.Treant(output)
else:
    class dummyTreant(object):
        categories = dict()

    data = dummyTreant()

# rank 0 reads the stored expressions; broadcast them so every rank
# evaluates the same functions
if parallelComm.procID == 0:
    eq_str = data.categories["eq"]
    eta_str = data.categories["eta"]
    kappa_fp = data.categories["kappa"]
else:
    eq_str = eta_str = kappa_fp = None

eq_str, eta_str, kappa_fp = parallelComm.bcast((eq_str, eta_str, kappa_fp))

from fipy.tools.numerix import tanh, sqrt, sin, cos, pi

eq_fp = eval(eq_str)
eta_fp = eval(eta_str)

# load checkpoint
if parallelComm.procID == 0:
    fname = data["step{}.tar.gz".format(startfrom)].make().abspath
else:
    fname = None

fname = parallelComm.bcast(fname)

eta, _ = fp.tools.dump.read(filename=fname)
def nucleus(x0, y0, r0):
    r = fp.numerix.sqrt((x - x0)**2 + (y - y0)**2)
    return (1 - fp.numerix.tanh((r - r0) / fp.numerix.sqrt(2.))) / 2

# > Generate random positions with a uniform distribution for 25 supercritical seeds with $r_0 = 1.1 r_c$.

# In[12]:

if not params['restart']:
    # rank 0 draws the seed positions; broadcasting them keeps the
    # initial condition identical on every process
    if parallelComm.procID == 0:
        seeds = fp.numerix.random.random(size=(25, 2))
    else:
        seeds = None

    seeds = parallelComm.bcast(seeds)

    for fx, fy in seeds:
        phi.setValue(phi + nucleus(x0=fx * Lx, y0=fy * Ly, r0=params['factor'] * rc))

    phi.setValue(1., where=phi > 1.)

# ## Setup output

# ### Setup output storage

# In[13]:

try:
    from sumatra.projects import load_project
    project = load_project(os.getcwd())
# In[ ]:

if parallelComm.procID == 0:
    if params['restart']:
        fname = os.path.join(os.path.dirname(params['restart']), "nucleii.txt")
        nucleii = fp.numerix.loadtxt(fname, skiprows=1)
    else:
        # draw nucleation times and positions once, on rank 0 only
        times = fp.numerix.random.random(params['numnuclei']) * totaltime
        times.sort()
        nucleii = fp.numerix.concatenate((times[..., fp.numerix.newaxis],
                                          fp.numerix.random.random((params['numnuclei'], 2))),
                                         axis=-1)
else:
    nucleii = None

nucleii = parallelComm.bcast(nucleii, root=0)

# ## Setup output

# ### Setup output storage

# In[ ]:

try:
    from sumatra.projects import load_project
    project = load_project(os.getcwd())
    record = project.get_record(params["sumatra_label"])
    output = record.datastore.root
except:
    # either there's no sumatra, no sumatra project, or no sumatra_label
    # this will be the case if this script is run directly