def post_hook():
    """
    Stop any brittle yielding near the edges of the model
    """
    coords = fn.input()
    zz = (coords[0] - GEO.nd(Model.minCoord[0])) / (GEO.nd(Model.maxCoord[0]) - GEO.nd(Model.minCoord[0]))
    fact = fn.math.pow(fn.math.tanh(zz*20.0) + fn.math.tanh((1.0-zz)*20.0) - fn.math.tanh(20.0), 4)
    Model.plasticStrain.data[:] = Model.plasticStrain.data[:] * fact.evaluate(Model.swarm)

    """
    Check spacing for when sedimentation should turn off
    # This solution was provided by: https://stackoverflow.com/a/38008452
    """
    rank = uw.rank()
    root = 0

    # get all the moho tracers that are on our CPU, in x sorted order
    moho_tracers = Model.passive_tracers["Moho"]  # Need this for restart safety
    local_array = numpy.sort(moho_tracers.swarm.particleCoordinates.data[:, 0])
    sendbuf = numpy.array(local_array)

    # We have to figure out how many particles each CPU has, and let the root
    # cpu know
    sendcounts = numpy.array(MPI.COMM_WORLD.gather(len(sendbuf), root))

    if rank == root:
        # prepare to receive all this data
        recvbuf = numpy.empty(sum(sendcounts), dtype=float)
    else:
        recvbuf = None

    # Gather up all the data and put it in recvbuf
    MPI.COMM_WORLD.Gatherv(sendbuf=sendbuf, recvbuf=(recvbuf, sendcounts), root=root)

    if rank == root:
        # find the biggest gap in the X direction in the moho_tracers
        diff = numpy.max(numpy.diff(numpy.sort(recvbuf)))  # recvbuf is the array of all particles
    else:
        diff = None

    # Now that we know the biggest gap, tell all the other CPUs
    diff = MPI.COMM_WORLD.bcast(diff, root=0)
    biggest_gap = GEO.Dimensionalize(diff, u.km)
    uw.barrier()
    print(uw.rank(), "Biggest gap in tracers", biggest_gap)

    if biggest_gap > gap_to_stop_sedi:
        print("Sedimentation turned: OFF at {}".format(Model.time))
        threshold = -10 * u.kilometers
    else:
        print("Sedimentation turned: ON")
        threshold = -1 * u.kilometers

    Model.surfaceProcesses = GEO.surfaceProcesses.SedimentationThreshold(
        air=[air], sediment=[sediment], threshold=threshold)

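# Hedged registration sketch for the hook above: in a UWGeodynamics script such a
# function is normally attached to the Model so it runs after every solve. The
# `post_solve_functions` attribute name is an assumption here and may differ
# between UWGeodynamics versions.
# Model.post_solve_functions["sedimentation_switch"] = post_hook
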
def print_stats(self):
    purple = "\033[0;35m"
    endcol = "\033[00m"
    boldpurple = "\033[1;35m"
    if 0 == uw.rank():
        print boldpurple
        print(" ")
        print("Pressure iterations: %3d" % (self._cself.stats.pressure_its))
        print("Velocity iterations: %3d (presolve) " % (self._cself.stats.velocity_presolve_its))
        print("Velocity iterations: %3d (pressure solve)" % (self._cself.stats.velocity_pressuresolve_its))
        print("Velocity iterations: %3d (backsolve) " % (self._cself.stats.velocity_backsolve_its))
        print("Velocity iterations: %3d (total solve) " % (self._cself.stats.velocity_total_its))
        print(" ")
        print("SCR RHS setup time: %.4e" % (self._cself.stats.velocity_presolve_setup_time))
        print("SCR RHS solve time: %.4e" % (self._cself.stats.velocity_presolve_time))
        print("Pressure setup time: %.4e" % (self._cself.stats.velocity_pressuresolve_setup_time))
        print("Pressure solve time: %.4e" % (self._cself.stats.pressure_time))
        print("Velocity setup time: %.4e (backsolve)" % (self._cself.stats.velocity_backsolve_setup_time))
        print("Velocity solve time: %.4e (backsolve)" % (self._cself.stats.velocity_backsolve_time))
        print("Total solve time : %.4e" % (self._cself.stats.total_time))
        print(" ")
        print("Velocity solution min/max: %.4e/%.4e" % (self._cself.stats.vmin, self._cself.stats.vmax))
        print("Pressure solution min/max: %.4e/%.4e" % (self._cself.stats.pmin, self._cself.stats.pmax))
        print(" ")
        print endcol

def get_conditions(self):
    """ Get the mechanical boundary conditions

    Returns
    -------
    List of conditions as:
        [<underworld.conditions._conditions.DirichletCondition,
         <underworld.conditions._conditions.NeumannCondition]
    or
        [<underworld.conditions._conditions.DirichletCondition]
    """
    Model = self.Model

    # Reinitialise neumann condition
    self._neumann_indices = []

    for _ in range(Model.mesh.dim):
        self._neumann_indices.append(Model.mesh.specialSets["Empty"])

    for set_ in self.order_wall_conditions:
        (condition, nodes) = self._wall_indexSets[set_]
        self._apply_conditions_nodes(condition, nodes)

    if self.nodeSets:
        for (condition, nodes) in self.nodeSets:
            self._apply_conditions_nodes(condition, nodes)

    self.neumann_conditions = None
    _neumann_indices = []

    # Remove empty Sets
    for val in self._neumann_indices:
        if val.data.size > 0:
            _neumann_indices.append(val)
        else:
            _neumann_indices.append(None)
    self._neumann_indices = tuple(_neumann_indices)

    # Now we only create a Neumann condition if we have a stress condition
    # somewhere, on any of the procs.
    local_procs_has_neumann = np.zeros((uw.nProcs()))
    global_procs_has_neumann = np.zeros((uw.nProcs()))

    if self._neumann_indices != tuple([None for val in range(Model.mesh.dim)]):
        local_procs_has_neumann[uw.rank()] = 1

    comm.Allreduce(local_procs_has_neumann, global_procs_has_neumann)
    comm.Barrier()

    if any(global_procs_has_neumann):
        self.neumann_conditions = uw.conditions.NeumannCondition(
            fn_flux=Model.tractionField,
            variable=Model.velocityField,
            indexSetsPerDof=self._neumann_indices)

    return self.neumann_conditions

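# A minimal standalone sketch of the "does any process have this condition?"
# pattern used above, with plain mpi4py and numpy (illustration only, not part
# of the Underworld API): each rank fills its slot of a flag array, Allreduce
# sums the arrays, and every rank then takes the same branch collectively.
from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
nprocs = comm.Get_size()
rank = comm.Get_rank()

local_flags = np.zeros(nprocs)
if rank == 0:                    # pretend only rank 0 holds a stress condition
    local_flags[rank] = 1.0

global_flags = np.zeros(nprocs)
comm.Allreduce(local_flags, global_flags)   # default reduction op is SUM

if global_flags.any():
    pass  # every rank would create the (collective) Neumann condition here
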
def open_viewer(self, args=[], background=True):
    """ Open the external viewer. """
    fname = self.db.filename
    if not fname:
        fname = os.path.join(tmpdir, "gluciferDB" + self.db._id + ".gldb")
        self.save_database(fname)

    # Already open?
    if self._viewerProc and self._viewerProc.poll() == None:
        return

    global lavavu
    if lavavu and uw.rank() == 0:
        # Open viewer with local web server for interactive/iterative use
        if background:
            self._viewerProc = subprocess.Popen(
                [self.db._lvbin, "-" + str(self.db.step), "-p9999", "-q90", fname] + args,
                stdout=PIPE, stdin=PIPE, stderr=STDOUT,
            )
            from IPython.display import HTML
            return HTML(
                """<a href='#' onclick='window.open("http://" + location.hostname + ":9999");'>Open Viewer Interface</a>"""
            )
        else:
            lv = self.db.lvrun(db=fname, port=9999)

def show(self, type="image"): """ Shows the generated image inline within an ipython notebook Parameters ---------- type: str Type of visualisation to display ('Image' or 'WebGL'). Default is 'Image'. Returns ------- Ipython HTML object (for type 'Image') Ipython IFrame object (for type 'Webgl') Note that if IPython is not installed, this method will return nothing. """ try: from IPython.display import Image,HTML self._generate_DB() if uw.rank() == 0: if type.lower() == "webgl": return self._generate_HTML() else: return self._generate_image() except ImportError: pass except RuntimeError, e: print "Error creating image: " print e pass
def _generate_HTML(self):
    if uw.rank() > 0:
        return
    try:
        #Export encoded json string
        lv = self.db.lvget(script=self._script)
        #Create link to web content directory
        if not os.path.isdir("html"):
            os.symlink(os.path.join(self.db._lvpath, 'html'), 'html')
        jsonstr = lv.app.web()
        #Write files to disk first, can be passed directly on url but is slow for large datasets
        filename = "input_" + self.db._db.name + ".json"
        text_file = open("html/" + filename, "w")
        text_file.write(jsonstr)
        text_file.close()
        from IPython.display import IFrame
        return IFrame("html/viewer.html#" + filename,
                      width=self["resolution"][0], height=self["resolution"][1])
        #import base64
        #return IFrame("html/index.html#" + base64.b64encode(jsonstr), width=self["resolution"][0], height=self["resolution"][1])
    except RuntimeError, e:
        print "LavaVu error: " + str(e)
        import traceback
        traceback.print_exc()
        pass

def open_viewer(self, args=[], background=True):
    """ Open the external viewer. """
    fname = self.db.filename
    if not fname:
        fname = os.path.join(tmpdir, "gluciferDB" + self.db._id + ".gldb")
        self.save_database(fname)

    #Already open?
    if self._viewerProc and self._viewerProc.poll() == None:
        return

    if uw.rank() == 0:
        #Open viewer with local web server for interactive/iterative use
        if background:
            self._viewerProc = subprocess.Popen(
                ["LV", "-" + str(self.db.step), "-p9999", "-q90", fname] + self._script + args,
                stdout=PIPE, stdin=PIPE, stderr=STDOUT)
            from IPython.display import HTML
            return HTML(
                '''<a href='#' onclick='window.open("http://" + location.hostname + ":9999");'>Open Viewer Interface</a>'''
            )
        else:
            self.db.lvget(db=fname, port=9999)

def save_image(self, filename):
    """
    Saves the generated image to the provided filename.

    Parameters
    ----------
    filename : str
        Filename to save file to. May include an absolute or relative path.
    """
    if not isinstance(filename, str):
        raise TypeError("Provided parameter 'filename' must be of type 'str'. ")
    self._generate_DB()
    if uw.rank() == 0:
        self._generate_image(asfile=True)
        generatedFilename = self._find_generated_file()
        absfilename = os.path.abspath(filename)

        # lets set the final extension to that of the glucifer generated file
        splitabsfilename = os.path.splitext(absfilename)
        splitgenfilename = os.path.splitext(generatedFilename)
        if splitabsfilename[1].lower() in [".png", ".jpg", ".jpeg"]:
            frontpart = splitabsfilename[0]
        else:
            frontpart = absfilename
        finaloutFile = frontpart + splitgenfilename[1]
        os.rename(generatedFilename, finaloutFile)

def show(self, type="Image"): """ Shows the generated image inline within an ipython notebook. Parameters ---------- type: str Type of visualisation to display ('Image' or 'WebGL'). If IPython is installed, displays the result image or WebGL content inline If IPython is not installed, this method will call the default image/web output routines to save the result with a default filename in the current directory """ try: if type.lower() != "webgl" and lavavu.is_notebook(): self._generate_DB() if uw.rank() > 0: return from IPython.display import display, Image, HTML #Return inline image result filename = self._generate_image() display(HTML("<img src='%s'>" % filename)) else: #Fallback to export image or call viewer webgl export self.save(filename=self.name, type=type) except RuntimeError as e: print("Error creating image: ", e) pass except: raise
def max_global_auxiliary(self):
    """
    Returns the results of the auxiliary function evaluated at the
    location corresponding to the primary function maximum. This method
    considers results across all processes (ie, globally).

    Notes
    -----
    This method must be called collectively by all processes.

    Returns
    -------
    FunctionIO: value at global maximum.
    """
    # first make sure that we have determined the rank with the max
    self.max_global()
    import underworld as uw
    # if we are the rank with the max result, extract result
    if uw.rank() == self.max_rank():
        auxout = self.max_local_auxiliary()
    else:
        auxout = None
    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    # broadcast
    data = comm.bcast(auxout, root=self.max_rank())
    return data

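# A self-contained sketch of the same broadcast pattern with plain mpi4py
# (illustration only, not part of the Underworld API): every rank learns which
# rank owns the global maximum, then that rank's auxiliary payload is broadcast
# so all ranks end up with the same object.
from mpi4py import MPI
import numpy as np

comm = MPI.COMM_WORLD
rank = comm.Get_rank()

local_max = float(rank * 1.5)                 # stand-in for this rank's local maximum
payload = {"rank": rank, "value": local_max}  # auxiliary data held by this rank

# allgather lets every rank see every local maximum, so all agree on the owner.
all_max = comm.allgather(local_max)
owner = int(np.argmax(all_max))

# Broadcast the owner's auxiliary data; every rank receives the same object.
data = comm.bcast(payload if rank == owner else None, root=owner)
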
def print_stats(self):
    purple = "\033[0;35m"
    endcol = "\033[00m"
    boldpurple = "\033[1;35m"
    if 0 == uw.rank():
        print boldpurple
        print(" ")
        print("Pressure iterations: %3d" % (self._cself.stats.pressure_its))
        print("Velocity iterations: %3d (presolve) " % (self._cself.stats.velocity_presolve_its))
        print("Velocity iterations: %3d (pressure solve)" % (self._cself.stats.velocity_pressuresolve_its))
        print("Velocity iterations: %3d (backsolve) " % (self._cself.stats.velocity_backsolve_its))
        print("Velocity iterations: %3d (total solve) " % (self._cself.stats.velocity_total_its))
        print(" ")
        print("SCR RHS solve time: %.4e" % (self._cself.stats.velocity_presolve_time))
        print("Pressure solve time: %.4e" % (self._cself.stats.pressure_time))
        print("Velocity solve time: %.4e (backsolve)" % (self._cself.stats.velocity_backsolve_time))
        print("Total solve time : %.4e" % (self._cself.stats.total_time))
        print(" ")
        print("Velocity solution min/max: %.4e/%.4e" % (self._cself.stats.vmin, self._cself.stats.vmax))
        print("Pressure solution min/max: %.4e/%.4e" % (self._cself.stats.pmin, self._cself.stats.pmax))
        print(" ")
        print endcol

def update_values():
    """
    Assumes global variables:
    * time
    * step
    ...
    + many functions
    """
    # save the time and step
    valuesDict.timeAtSave.append(time)
    valuesDict.stepAtSave.append(step)

    for e in tm.undirected.edges():
        if tm.is_subduction_boundary(e):
            # hacky workaround for the directed/undirected issue; need get_bound_loc
            ee = tm.subduction_edge_order(e)
        else:
            ee = e
        valuesDict[str(e)].append(tm.get_bound_loc(ee))

    # save
    if uw.rank() == 0:
        fullpath = os.path.join(outputPath + "tect_model_data")
        # the '**' is important
        np.savez(fullpath, **valuesDict)

def show(self, type="Image"): """ Shows the generated image inline within an ipython notebook. Parameters ---------- type: str Type of visualisation to display ('Image' or 'WebGL'). If IPython is installed, displays the result image or WebGL content inline If IPython is not installed, this method will call the default image/web output routines to save the result with a default filename in the current directory """ try: if type.lower() != "webgl" and lavavu.is_notebook(): self._generate_DB() if uw.rank() > 0: return from IPython.display import display,Image,HTML #Return inline image result filename = self._generate_image() display(HTML("<img src='%s'>" % filename)) else: #Fallback to export image or call viewer webgl export self.save(filename=self.name, type=type) except RuntimeError as e: print("Error creating image: ", e) pass except: raise
def send_command(self, cmd, retry=True):
    """
    Run command on an open viewer instance.

    Parameters
    ----------
    cmd: str
        Command to send to open viewer.
    """
    if uw.rank() == 0:
        self.open_viewer()
        url = "http://localhost:9999/command=" + urllib2.quote(cmd)
        try:
            #print url
            response = urllib2.urlopen(url).read()
            #print response
        except:
            print("Send command '" + cmd + "' failed, no response")
            if retry:
                #Wait a few seconds so server has time to start then try again
                print("... retrying in 1s ...")
                time.sleep(1)
                self.send_command(cmd, False)
            else:
                print("... failed, skipping ...")
                pass

def rc_params_from_file(fname, fail_on_error=False, use_default_template=True):
    """Return :class:`matplotlib.RcParams` from the contents of the given file.

    Parameters
    ----------
    fname : str
        Name of file parsed for matplotlib settings.
    fail_on_error : bool
        If True, raise an error when the parser fails to convert a parameter.
    use_default_template : bool
        If True, initialize with default parameters before updating with those
        in the given file. If False, the configuration class only contains the
        parameters specified in the file. (Useful for updating dicts.)
    """
    config_from_file = _rc_params_in_file(fname, fail_on_error)

    if not use_default_template:
        return config_from_file

    iter_params = six.iteritems(defaultParams)
    config = RcParams([(key, default) for key, (default, _) in iter_params
                       if key not in _all_deprecated])
    config.update(config_from_file)

    if underworld.rank() == 0:
        print('loaded rc file %s' % fname)

    return config

def print_stats(self):
    if 0 == uw.rank():
        print("Pressure iterations: %d" % (self._cself.stats.pressure_its))
        print("Velocity iterations: %d (backsolve)" % (self._cself.stats.velocity_backsolve_its))
        print(" ")
        print("Pressure solve time: %.4e" % (self._cself.stats.pressure_time))
        print("Velocity solve time: %.4e (backsolve)" % (self._cself.stats.velocity_backsolve_time))
        print("Total solve time : %.4e" % (self._cself.stats.total_time))
        print(" ")
        print("Velocity solution min/max: %.4e/%.4e" % (self._cself.stats.vmin, self._cself.stats.vmax))
        print("Pressure solution min/max: %.4e/%.4e" % (self._cself.stats.pmin, self._cself.stats.pmax))

def _generate_image(self, asfile=False):
    if uw.rank() == 0:
        #Render with viewer
        args = [self._lvbin, self._db.path, "-" + str(self._db.timeStep),
                "-p0", "-z" + str(self.antialias)]
        if asfile:
            starting_directory = os.getcwd()
            lavavu.initViewer(args + ["-I", ":"] + self._script)
        else:
            imagestr = lavavu.initViewer(args + ["-u", ":"] + self._script)
            from IPython.display import Image, HTML
            return HTML("<img src='%s'>" % imagestr)

def viewer(self):
    """ Open the inline viewer. """
    #Open viewer instance
    global lavavu
    if lavavu and uw.rank() == 0:
        #Generate db if doesn't exist
        if not self.db._db.path:
            self._generate_DB()
        v = self.db.lvrun()
        v.window()
        return v

def viewer(self):
    """ Open the inline viewer. """
    fname = self.db.filename
    if not fname:
        fname = os.path.join(tmpdir, "gluciferDB" + self.db._id + ".gldb")
        self.save_database(fname)

    global lavavu
    if lavavu and uw.rank() == 0:
        lavavu.viewer = self.db.lvrun(db=fname)
        lavavu.control.viewer()
        return lavavu.viewer

def _generate_HTML(self):
    if uw.rank() == 0:
        #Export encoded json string
        jsonstr = lavavu.initViewer([self._lvbin, "-" + str(self._db.timeStep),
                                     "-U", "-p0", self._db.path, ":"] + self._script)
        if not os.path.isdir("html"):
            #Create link to web content directory
            os.symlink(self._lvpath + 'html', 'html')
        text_file = open("html/input.json", "w")
        text_file.write(jsonstr)
        text_file.close()
        from IPython.display import IFrame
        return IFrame("html/index.html#input.json", width=1000, height=800)
    return ""

def set_penalty(self, penalty):
    """
    By setting the penalty, the Augmented Lagrangian Method is used as the solve.
    This method is not recommended for normal use as there is additional memory
    and cpu overhead.

    This method can often help improve convergence issues for subduction-type
    problems with large viscosity contrasts that are having trouble converging.

    A penalty of roughly 0.1 of the maximum viscosity contrast is not a bad
    place to start as a guess. (check notes/paper)
    """
    # validate the supplied penalty argument before applying it
    if isinstance(penalty, float) and penalty >= 0.0:
        self.options.main.penalty = penalty
    elif 0 == uw.rank():
        print("Invalid penalty number chosen. Penalty must be a positive float.")

def _generate_image(self, filename="", size=(0, 0)): global lavavu if not lavavu or uw.rank() > 0: return try: # Render with viewer lv = self.db.lvrun(quality=self.quality, script=self._script) imagestr = lv.image(filename, size[0], size[1]) # Return the generated filename return imagestr except RuntimeError, e: print "LavaVu error: " + str(e) pass
def start():
    """
    Call this function to start recording timing data.
    """
    depth = 0
    global timing
    global _maxdepth
    global _currentDepth
    _currentDepth = 0
    if _uw.rank() == 0 and ("UW_ENABLE_TIMING" in _os.environ):
        timing = True
        _maxdepth = depth + 1
        global _starttime
        _starttime = _time.time()

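# Hedged usage sketch for the timing module above: recording is gated on the
# UW_ENABLE_TIMING environment variable, so it is set before the run starts.
# `uw.timing.print_table()` is assumed to exist in this version of the module.
# import os
# os.environ["UW_ENABLE_TIMING"] = "1"
# import underworld as uw
# uw.timing.start()
# ...  # run the model
# uw.timing.print_table()
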
def window(self, *args, **kwargs):
    """
    Open an inline viewer.

    This returns a new LavaVu instance to display the figure
    and opens it as an interactive viewing window.
    """
    #Open a new viewer instance and display window
    if uw.rank() == 0:
        v = self.viewer(new=True, *args, **kwargs)
        #Ensure correct figure selected
        v.figure(self.name)
        #Show the inline window,
        v.window()
        return v

def _generate_image(self, filename="", size=(0, 0)): if uw.rank() > 0: return try: #Render with viewer lv = self.db.lvget(quality=self["quality"], script=self._script) imagestr = lv.image(filename, resolution=size) #Return the generated filename return imagestr except RuntimeError, e: print "LavaVu error: " + str(e) import traceback traceback.print_exc() pass
def set_penalty(self, penalty):
    """
    By setting the penalty, the Augmented Lagrangian Method is used as the solve.
    This method is not recommended for normal use as there is additional memory
    and cpu overhead.

    This method can often help improve convergence issues for problems with
    large viscosity contrasts that are having trouble converging.

    A penalty of roughly 0.1 of the maximum viscosity contrast is not a bad
    place to start as a guess. (check notes/paper)
    """
    # validate the supplied penalty argument before applying it
    if isinstance(penalty, float) and penalty >= 0.0:
        self.options.main.penalty = penalty
        self.options.main.Q22_pc_type = "gkgdiag"
    elif 0 == uw.rank():
        print("Invalid penalty number chosen. Penalty must be a positive float.")

def open_viewer(self, args=[]):
    """ Open the viewer. """
    if uw.rank() == 0:
        fname = os.path.join(tmpdir, "gluciferDB" + self._id + ".gldb")
        self.save_database(fname)
        if self._viewerProc and self._viewerProc.poll() == None:
            return

        #Open viewer with local web server for interactive/iterative use
        args = [self._lvbin, "-" + str(self._db.timeStep), "-L", "-p8080", "-q90", "-Q", fname] + args
        self._viewerProc = subprocess.Popen(args, stdout=PIPE, stdin=PIPE, stderr=STDOUT)
        from IPython.display import HTML
        return HTML('''<a href='#' onclick='window.open("http://" + location.hostname + ":8080");'>Open Viewer Interface</a>''')

def _generate_image(self, filename="", size=(0,0)): if uw.rank() > 0: return try: #Render with viewer lv = self.db.lvget(quality=self["quality"], script=self._script) imagestr = lv.image(filename, resolution=size) #Return the generated filename return imagestr except RuntimeError as e: print("LavaVu error: ", e) import traceback traceback.print_exc() pass return ""
def _generate_image(self, filename="", size=(0,0)): global lavavu if not lavavu or uw.rank() > 0: return try: #Render with viewer lv = self.db.lvrun(quality=self["quality"], script=self._script) imagestr = lv.image(filename, size[0], size[1]) #Return the generated filename return imagestr except RuntimeError,e: print "LavaVu error: " + str(e) import traceback traceback.print_exc() pass
def __init__(self, swarm, particlesPerCell, **kwargs):
    import underworld as uw
    if uw.rank() == 0:
        # TODO: Deprecate
        import warnings
        warnings.warn("Note that the 'GlobalSpaceFillerLayout' will be deprecated in future releases of Underworld. "
                      "The `PerCellSpaceFillerLayout` provides similar functionality.")

    if not isinstance(particlesPerCell, (int, float)):
        raise TypeError("'particlesPerCell' object passed in must be of type 'float' or 'int'.")
    if particlesPerCell <= 0:
        raise ValueError("'particlesPerCell' object passed in must take a value greater than zero.")
    self._particlesPerCell = float(particlesPerCell)

    # build parent
    super(GlobalSpaceFillerLayout, self).__init__(swarm=swarm, **kwargs)

def save(self, filename):
    """
    Saves the database to the provided filename.

    Parameters
    ----------
    filename : str
        Filename to save file to. May include an absolute or relative path.
    """
    if uw.rank() == 0:
        if not isinstance(filename, str):
            raise TypeError("Provided parameter 'filename' must be of type 'str'. ")
        if not filename.lower().endswith('.gldb') and not filename.lower().endswith('.db'):
            filename += '.gldb'
        libUnderworld.gLucifer.lucDatabase_BackupDbFile(self._db, filename)
        return filename

def _read_state(self):
    #Read state from database
    global lavavu
    if not lavavu or uw.rank() > 0:
        return
    if not self._db.db:
        libUnderworld.gLucifer.lucDatabase_OpenDatabase(self._db)
    try:
        lv = self.lvrun()
        #Get state, includes the list of objects in the loaded database
        statestr = lv.getFigures()
        #Also save the step data
        self.timesteps = json.loads(lv.getTimeSteps())
        return json.loads(statestr)
    except RuntimeError, e:
        print "LavaVu error: " + str(e)
        pass

def _read_state(self):
    #Read state from database (DEPRECATED)
    if uw.rank() > 0:
        return
    if not self._db.db:
        libUnderworld.gLucifer.lucDatabase_OpenDatabase(self._db)
    try:
        lv = self.lvget()
        #Also save the step data
        self.timesteps = json.loads(lv.app.getTimeSteps())
        #Get figures/states
        return lv.app.figures
    except RuntimeError as e:
        print("LavaVu error: " + str(e))
        import traceback
        traceback.print_exc()
        pass

def swarm_save_load():
    '''
    This test simply creates a swarm & variable, saves them, then loads it into
    another swarm and checks for equality.
    '''
    mesh = uw.mesh.FeMesh_Cartesian(elementType='Q1/dQ0', elementRes=(16, 16),
                                    minCoord=(0., 0.), maxCoord=(1., 1.))
    swarm = uw.swarm.Swarm(mesh)
    swarm.populate_using_layout(uw.swarm.layouts.PerCellGaussLayout(swarm, 2))
    svar = swarm.add_variable("int", 1)

    # Write something to variable
    import numpy as np
    svar.data[:, 0] = np.arange(swarm.particleLocalCount)

    # Save to a file:
    swarm.save("saved_swarm.h5")
    svar.save("saved_swarm_variable.h5")

    # Now let's try and reload. First create an empty swarm, and then load:
    clone_swarm = uw.swarm.Swarm(mesh)
    clone_swarm.load("saved_swarm.h5")
    clone_svar = clone_swarm.add_variable("int", 1)
    clone_svar.load("saved_swarm_variable.h5")

    # Now check for equality:
    if np.allclose(swarm.particleCoordinates.data, clone_swarm.particleCoordinates.data) != True:
        raise RuntimeError("Loaded swarm does not appear to be identical to saved swarm.")
    if np.allclose(svar.data, clone_svar.data) != True:
        raise RuntimeError("Loaded swarm variable does not appear to be identical to saved swarm.")

    # Clean up:
    if uw.rank() == 0:
        import os
        os.remove("saved_swarm.h5")
        os.remove("saved_swarm_variable.h5")

def get_data(group_by="line_routine"):
    """
    Returns dict with timing data.

    Parameters
    ----------
    group_by: str
        Reported timing data is grouped according to the following options:
        "line"        : Calling line of code.
        "routine"     : Class routine.
        "line_routine": Line & routine form an individual timing group.
    """
    if _uw.rank() != 0:
        return

    # function to convert key into useful text
    def linefunc(key):
        if key[1].startswith("<ipython-input-"):
            spltstr = key[1].split("-")
            no_cell = int(spltstr[2])
            no_line = key[2]
            return "Cell: {:>3} Line:{:>3}".format(no_cell, no_line)
        else:
            return "{}:{:>5}".format(key[1], key[2])

    if group_by == "line":
        keyfunc = linefunc
    elif group_by == "routine":
        keyfunc = lambda key: key[0]
    elif group_by == "line_routine":
        keyfunc = lambda key: "{} {}".format(linefunc(key), key[0])
    else:
        raise ValueError("'group_by' parameter should specify 'line', 'routine' or 'line_routine'.")

    # regroup data
    regrouped_dict = _dd(lambda: [0, 0.])

    for key, value in _hit_count.iteritems():
        data = regrouped_dict[keyfunc(key)]
        data[0] += value[0]
        data[1] += value[1]

    return regrouped_dict

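# Self-contained illustration of the regrouping step above: a defaultdict keyed
# by a derived label accumulates [hit_count, total_time] pairs. The raw keys and
# numbers below are made up for the example.
from collections import defaultdict

raw_hits = {("StokesSolver.solve", "model.py", 10): [3, 1.20],
            ("StokesSolver.solve", "model.py", 42): [1, 0.40],
            ("Swarm.save",         "model.py", 77): [2, 0.05]}

grouped = defaultdict(lambda: [0, 0.0])
for (routine, fname, lineno), (hits, secs) in raw_hits.items():
    data = grouped[routine]      # here: group by routine only
    data[0] += hits
    data[1] += secs

# grouped["StokesSolver.solve"] is now [4, 1.60]
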
def swarm_save_load():
    '''
    This test simply creates a swarm & variable, saves them, then loads it into
    another swarm and checks for equality.
    '''
    mesh = uw.mesh.FeMesh_Cartesian(elementType='Q1/dQ0', elementRes=(16, 16),
                                    minCoord=(0., 0.), maxCoord=(1., 1.))
    swarm = uw.swarm.Swarm(mesh)
    swarm.populate_using_layout(uw.swarm.layouts.PerCellGaussLayout(swarm, 2))
    svar1 = swarm.add_variable("double", 2)
    svar2 = swarm.add_variable("int", 1)

    # Write the positions to the variable
    svar1.data[:] = swarm.particleCoordinates.data[:]
    # write the rounded particle coords
    svar2.data[:, 0] = (1000. * swarm.particleCoordinates.data[:, 0]).astype(int)

    # Save to a file:
    swarm.save("saved_swarm.h5")
    svar1.save("saved_swarm_variable1.h5")
    svar2.save("saved_swarm_variable2.h5")

    # Now let's try and reload. First create an empty swarm, and then load:
    clone_swarm = uw.swarm.Swarm(mesh)
    clone_swarm.load("saved_swarm.h5")

    # check it has required particle count
    globcount = clone_swarm.particleGlobalCount
    if globcount != 16 * 16 * 4:
        raise RuntimeError("Reloaded swarm appears to have {} particles but {} were expected.".format(globcount, 16 * 16 * 4))

    # reload recorded positions var
    clone_svar1 = clone_swarm.add_variable("double", 2)
    clone_svar1.load("saved_swarm_variable1.h5")
    if np.allclose(clone_swarm.particleCoordinates.data, clone_svar1.data) != True:
        raise RuntimeError("Loaded swarm variable1 does not appear to contain the correct data.")

    clone_svar2 = clone_swarm.add_variable("int", 1)
    clone_svar2.load("saved_swarm_variable2.h5")
    if np.allclose(clone_svar2.data[:, 0], (1000. * clone_swarm.particleCoordinates.data[:, 0]).astype(int)) != True:
        raise RuntimeError("Loaded swarm variable2 does not appear to contain correct data.")

    # Clean up:
    if uw.rank() == 0:
        import os
        os.remove("saved_swarm.h5")
        os.remove("saved_swarm_variable1.h5")
        os.remove("saved_swarm_variable2.h5")

def update_particle_owners(self):
    """
    This routine will update particles owners after particles have been
    moved. This is both in terms of the cell/element the particle resides
    within, and also in terms of the parallel processor decomposition
    (particles belonging on other processors will be sent across).

    Users should not generally need to call this as it will be called
    automatically at the conclusion of a deform_swarm() block.

    Notes
    -----
    This method must be called collectively by all processes.

    Example
    -------
    >>> mesh = uw.mesh.FeMesh_Cartesian( elementType='Q1/dQ0', elementRes=(16,16), minCoord=(0.,0.), maxCoord=(1.,1.) )
    >>> swarm = uw.swarm.Swarm(mesh)
    >>> swarm.populate_using_layout(uw.swarm.layouts.PerCellGaussLayout(swarm,2))
    >>> swarm.data[0]
    array([ 0.0132078, 0.0132078])
    >>> swarm.owningCell.data[0]
    array([0], dtype=int32)
    >>> with swarm.deform_swarm():
    ...     swarm.data[0] = [0.1,0.1]
    >>> swarm.owningCell.data[0]
    array([17], dtype=int32)

    """
    orig_total_particles = self.particleGlobalCount
    libUnderworld.StgDomain.Swarm_UpdateAllParticleOwners(self._cself)
    libUnderworld.PICellerator.EscapedRoutine_RemoveFromSwarm(self._escapedRoutine, self._cself)
    new_total_particles = self.particleGlobalCount
    if (uw.rank() == 0) and (not self.particleEscape) and (orig_total_particles != new_total_particles):
        raise RuntimeError("Particles appear to have left the domain, but swarm flag `particleEscape` is False. "
                           "Check your velocity field or your particle relocation routines, or set the "
                           "`particleEscape` swarm constructor parameter to True to allow escape.")
    libUnderworld.PICellerator.GeneralSwarm_ClearSwarmMaps(self._cself)
    self._toggle_state()

def show(self, type="image"): """ Shows the generated image inline within an ipython notebook. Parameters ---------- type: str Type of visualisation to display ('Image' or 'WebGL'). Default is 'Image'. If IPython is installed, displays the result image or WebGL content inline If IPython is not installed, this method will call the default image/web output routines to save the result with a default filename in the current directory """ self._generate_DB() global lavavu if not lavavu or uw.rank() > 0: return try: if __IPYTHON__: from IPython.display import display, Image, HTML if type.lower() == "webgl": display(self._generate_HTML()) else: # Return inline image result filename = self._generate_image() display(HTML("<img src='%s'>" % filename)) except NameError, ImportError: # Not in IPython, call default image save routines (autogenerated filenames) try: if type.lower() == "webgl": lv = self.db.lvrun() lv.web(True) else: # -1 selects last figure/state in list lv = self.db.lvrun( figure=-1, quality=self.quality, writeimage=True, res=self["resolution"], script=self._script ) except RuntimeError, e: print "LavaVu error: " + str(e) pass
def save_database(self, filename, regen=True):
    """
    Saves the generated database to the provided filename.

    Parameters
    ----------
    filename : str
        Filename to save file to. May include an absolute or relative path.
    regen : bool, default=True
        Regenerate the database, only required if show() has not been called previously.
    """
    if regen:
        self._generate_DB()
    if uw.rank() == 0:
        if not isinstance(filename, str):
            raise TypeError("Provided parameter 'filename' must be of type 'str'. ")
        if not filename.lower().endswith('.gldb'):
            filename += '.gldb'
        libUnderworld.gLucifer.lucDatabase_BackupDbFile(self._db, filename)

def viewer(self, new=False, *args, **kwargs):
    """
    Return viewer instance.

    Parameters
    ----------
    new: boolean
        If True, a new viewer instance will always be returned.
        Otherwise the existing instance will be used if available.
    """
    #Open/get viewer instance
    if uw.rank() == 0:
        #Generate db if doesn't exist
        if not self.db._db.path:
            self._generate_DB()
        #Get a viewer instance, if new requested always run a new one
        if new:
            return self.db.lvrun(*args, **kwargs)
        else:
            return self.db.lvget(*args, **kwargs)

def initialTempFn(temperatureField, mesh, tempRange=(0., 1.), pertStrength=0.2):
    temperatureField.data[:] = 0.
    tempMin, tempMax = tempRange
    deltaTemp = tempMax - tempMin
    boxHeight = 1.0
    for index, coord in enumerate(mesh.data):
        pertCoeff = math.cos(math.pi * coord[0]) * math.sin(math.pi * coord[1])
        temperatureField.data[index] = tempMin + deltaTemp * (boxHeight - coord[1]) + pertStrength * pertCoeff
        temperatureField.data[index] = max(tempMin, min(tempMax, temperatureField.data[index]))
    for index in mesh.specialSets["MinJ_VertexSet"]:
        temperatureField.data[index] = tempMax
    for index in mesh.specialSets["MaxJ_VertexSet"]:
        temperatureField.data[index] = tempMin
    if uw.rank() == 0:
        print "Sinusoidal initial temperature function applied."

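# Hedged usage sketch for the function above; the mesh/field construction follows
# the standard Underworld API, but the resolution and perturbation values here
# are illustrative only.
# mesh = uw.mesh.FeMesh_Cartesian(elementRes=(32, 32), minCoord=(0., 0.), maxCoord=(1., 1.))
# temperatureField = uw.mesh.MeshVariable(mesh=mesh, nodeDofCount=1)
# initialTempFn(temperatureField, mesh, tempRange=(0., 1.), pertStrength=0.2)
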
def save(self, filename=None, size=(0,0), type="Image"): """ Saves the generated image to the provided filename or the figure to the database. Parameters ---------- filename :str Filename to save file to. May include an absolute or relative path. size (tuple(int,int)): size of image in pixels, defaults to original figsize setting If omitted, simply saves the figure data without generating an image type: str Type of visualisation to save ('Image' or 'WebGL'). Returns ------- filename: str The final filename (including extension) used to save the image will be returned. Note that only the root process will return this filename. All other processes will not return anything. """ self._generate_DB() if filename is None or uw.rank() > 0: return if not isinstance(filename, str): raise TypeError("Provided parameter 'filename' must be of type 'str'. ") if size and not isinstance(size,tuple): raise TypeError("'size' object passed in must be of python type 'tuple'") try: if type.lower() == "webgl": lv = self.db.lvget(script=self._script) return lv.webgl(filename + '.html') else: return self._generate_image(filename, size) except RuntimeError as e: print("LavaVu error: ", e) import traceback traceback.print_exc() pass
def _generate_HTML(self):
    global lavavu
    if not lavavu or uw.rank() > 0:
        return
    try:
        # Export encoded json string
        lv = self.db.lvrun(script=self._script)
        # Create link to web content directory
        if not os.path.isdir("html"):
            os.symlink(os.path.join(self.db._lvpath, "html"), "html")
        jsonstr = lv.web()
        # Write files to disk first, can be passed directly on url but is slow for large datasets
        filename = "input_" + self.db._db.name + ".json"
        text_file = open("html/" + filename, "w")
        text_file.write(jsonstr)
        text_file.close()
        from IPython.display import IFrame
        return IFrame("html/index.html#" + filename,
                      width=self["resolution"][0], height=self["resolution"][1])
        # import base64
        # return IFrame("html/index.html#" + base64.b64encode(jsonstr), width=self["resolution"][0], height=self["resolution"][1])
    except RuntimeError, e:
        print "LavaVu error: " + str(e)
        pass
